+ ./ya make -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --add-peerdirs-tests all --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -A --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.2KVcGl5joe --bazel-remote-put --dist-cache-max-file-size=209715200 --dist-cache-evict-test-runs -A --retest ydb --build-custom-json=/home/runner/actions_runner/_work/ydb/ydb/graph.json --custom-context=/home/runner/actions_runner/_work/ydb/ydb/context.json --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES | 0.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a | 0.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a | 0.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms | 0.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} | 0.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests | 0.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large | 0.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut | 0.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build | 1.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut | 1.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} | 2.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut | 2.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a | 2.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a | 2.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/local_ydb/local_ydb | 2.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a | 2.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ...
results_accumulator.log} | 3.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut | 3.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a | 3.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a | 3.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning | 3.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 3.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut | 3.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a | 3.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete | 4.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a | 4.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 4.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a | 4.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal | 4.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/workload_topic | 4.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 4.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data | 4.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a | 4.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts | 4.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain | 4.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large | 4.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg | 4.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a | 4.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut | 5.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/libydb-core-util.a | 5.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} | 5.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/libydb-core-health_check.a | 5.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut | 5.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_s3_writer/ydb-core-tx-replication-service-ut_s3_writer | 5.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} | 5.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut | 5.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica | 5.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} | 5.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} | 5.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/large/ydb-tests-olap-large | 5.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf | 5.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} | 5.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} | 5.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut | 5.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a | 5.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} | 6.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart | 6.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/helpers/ut/ydb-core-kqp-compile_service-helpers-ut | 6.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a | 6.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} | 6.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer | 6.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a | 6.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx | 6.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} | 6.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a | 6.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas | 5.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a | 5.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication | 5.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe | 5.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut | 5.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/ut/libs-storage-partition_direct-ut | 5.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a | 5.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp | 5.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a | 6.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots | 6.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks | 6.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher | 6.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} | 6.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile | 6.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a | 6.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests | 6.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/operations/ydb-core-kqp-ut-olap-operations | 6.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a | 6.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/libydb-core-testlib.a | 6.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/libydb-core-viewer.a | 6.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a | 6.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb | 6.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 6.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut | 6.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... results_accumulator.log} | 6.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init | 6.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a | 6.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp | 6.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/libydb-core-wrappers.a | 6.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/libydb-services-ydb.a | 6.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a | 6.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut | 6.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 6.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut | 6.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut | 6.5%| [PD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} | 6.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs | 6.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat | 6.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record | 6.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut | 6.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy | 6.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} | 6.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} | 6.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut | 6.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/workload_log | 6.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} | 7.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_compression/create/ydb-tests-olap-column_compression-create | 7.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} | 7.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut | 7.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join | 7.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 7.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring | 7.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_replication/ydb-core-tx-replication-controller-ut_replication | 7.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut | 7.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 7.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} | 7.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer | 7.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} | 6.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut | 6.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut | 6.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} | 6.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a | 7.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 | 7.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime | 7.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard | 7.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost | 7.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} | 7.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a | 7.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp | 7.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp | 7.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp | 7.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp | 7.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp | 7.7%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a | 7.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp | 7.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a | 7.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a | 8.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a | 8.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_counters.cpp | 8.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/probes.cpp | 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a | 8.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp | 8.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp | 8.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/shared_cache_s3fifo.h_serialized.cpp | 8.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle.cpp | 8.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp | 8.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_writer.cpp | 8.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp | 8.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/shared_cache_events.h_serialized.cpp | 8.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/services.cpp | 8.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/runtime.cpp | 8.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a | 8.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tx_helpers.cpp | 8.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/yql_testlib/yql_testlib.cpp | 9.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp | 9.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table.cpp | 9.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/proxy.cpp | 9.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp | 9.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp | 9.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp | 9.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp | 9.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp | 9.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_sessions_describe.cpp | 9.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a | 9.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampling_throttling_control_internals.cpp | 9.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a | 9.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a | 9.9%| PREPARE $(CLANG_FORMAT-3815817643) | 9.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/request_discriminator.cpp |10.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampling_throttling_control.cpp |10.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |10.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/health_check/health_check.cpp |10.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/metrics_actor.cpp |10.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |10.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |10.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/other/mon_get_blob_page.cpp |10.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/drivedata_serializer.cpp |10.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |10.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/throttler.cpp |10.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |10.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/workload/libpy3stress-transfer-workload.global.a |10.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/time_cast/time_cast.cpp |10.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_merge.cpp |10.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_aggregate.cpp |10.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |10.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_filter.cpp |11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |11.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |11.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/unavailable_storage.cpp |11.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |11.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |11.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |11.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |11.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |11.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |11.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |11.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |11.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |11.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |11.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |11.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |11.8%| RESOURCE $(sbr:4966407557) |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |11.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |11.9%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/federated_query/actors/ut_service/common/libactors-ut_service-common.a |12.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |12.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/clients/file/libgateway-clients-file.a |12.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor.cpp |12.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |12.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |12.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |12.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/workload/libpy3stress-topic-workload.global.a |12.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/upload_rows_counters.h_serialized.cpp |12.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid.cpp |12.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |12.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/blake2/libcontrib-libs-blake2.a |12.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |12.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |12.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |12.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |12.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/ut/common/common.cpp |12.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |12.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |13.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |13.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |13.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |13.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |13.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |13.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |13.2%| PREPARE $(PYTHON) - 50.35 MB |13.3%| PREPARE $(YMAKE_PYTHON3-212672652) - 8.03 MB |13.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |13.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |13.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |13.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |13.3%| PREPARE $(BLACK-1902277502) - 9.24 MB |13.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |13.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |13.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |13.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |13.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |13.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_observer.cpp |13.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |13.5%| 
PREPARE $(FLAKE8_PY2-2255386470) - 5.11 MB |13.6%| PREPARE $(FLAKE8_PY3-995296992) - 19.54 MB |13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |13.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part.cpp |13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |13.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |13.7%| PREPARE $(RUFF-1833214467) - 8.42 MB |13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |13.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |14.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |14.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |14.1%| PREPARE $(YMAKE_PYTHON3) |14.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |14.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/workload_kv |14.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/actors/ut_service/common/helpers.cpp |14.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/c664ef6ca80e747b410e1da324_raw.auxcpp |14.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |14.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_c43ce24509a50b033fa4050a33.o |14.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_0c451aebc6dafbdf0d9da2ab02.o |14.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_03f75cad4510fd9d018635026c.o |14.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_52d3e6a0651990fc997ab40ba2.o |14.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_64bde13108f9284b2e9f0bbb7a.o |14.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_dc1e8788b8287c02880cfe2814.o |14.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |15.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |14.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |14.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |13.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |13.7%| COMPACTING CACHE 23.9MiB |15.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |16.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |24.8%| PREPARE $(CLANG-1922233694) - 209.73 MB |25.8%| PREPARE $(CLANG16-1380963495) - 307.75 MB |26.4%| PREPARE $(CLANG18-1866954364) - 302.01 MB |27.0%| PREPARE 
$(CLANG-2403293607) - 302.01 MB |26.9%| PREPARE $(CLANG20-882982600) |27.8%| PREPARE $(WITH_JDK-sbr:9470949154) - 184.57 MB |27.9%| PREPARE $(JDK_DEFAULT-2548586558) |27.6%| PREPARE $(WITH_JDK17-sbr:9470949154) |27.6%| PREPARE $(JDK17-2548586558) |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |29.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |33.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |35.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |35.6%| PREPARE $(VCS) |35.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |35.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |35.8%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |35.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/base/libpublic-lib-base.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |35.9%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |36.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |36.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/auth/ydb-public-sdk-cpp-tests-integration-auth |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |36.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |36.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |36.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/ini_config/libini_config.a |36.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |36.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |36.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |37.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stress/libpy3tests-library-stress.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cron_expression/liblibrary-cpp-cron_expression.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/workload/libpy3scheme_board-pile_promotion-workload.global.a |37.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |37.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |37.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/base/msgbus.cpp |37.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |37.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |37.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |37.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/public/types/libessentials-public-types.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |37.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |37.5%| PREPARE $(TEST_TOOL_HOST-sbr:11121628785) - 25.18 MB |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |37.7%| PREPARE $(LLD_ROOT-3107549726) - 33.57 MB |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/discovery_mutator/libclient-extensions-discovery_mutator.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohappyeyeballs/libpy3contrib-python-aiohappyeyeballs.global.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |38.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |38.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/client.cpp |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/cow_string/libcpp-containers-cow_string.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/logger/libimpl-internal-logger.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |39.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/executor/libclient-impl-executor.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/endpoints/libclient-impl-endpoints.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |39.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/common/libimpl-internal-common.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/db_driver_state/libimpl-internal-db_driver_state.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/crypto/quictls/libngtcp2-crypto-quictls.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/grpc_connections/libimpl-internal-grpc_connections.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/retry/libimpl-internal-retry.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/session/libclient-impl-session.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/make_request/libimpl-internal-make_request.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |40.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |40.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/thread_pool/libimpl-internal-thread_pool.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |40.2%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/stats/libclient-impl-stats.a |40.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |40.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/auth/libydb-services-auth.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |40.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |40.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/bridge/libydb-services-bridge.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |40.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/backup/libydb-services-backup.a |40.2%| [CP] {default-linux-x86_64, release, asan} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |40.3%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |40.3%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |40.3%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |40.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |40.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/libydb-services-config.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |40.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/libydb-services-cms.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |40.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/yajl/libcontrib-deprecated-yajl.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/dec/libbrotli-c-dec.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ibdrv/libcontrib-libs-ibdrv.a |40.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan/libclang_rt.asan-x86_64.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/enc/libbrotli-c-enc.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ftxui/libcontrib-libs-ftxui.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |40.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/auth/grpc_service.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/executor/libclient-types-executor.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/common/libbrotli-c-common.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |40.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/bridge/grpc_service.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |40.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |40.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |40.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/grpc_service.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |40.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/config/grpc_service.cpp |40.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/cms/grpc_service.cpp |40.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |40.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |40.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |40.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |41.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/discovery/libydb-services-discovery.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/codes/libservices-datastreams-codes.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |41.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/put_records_actor.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |41.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/backup/grpc_service.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |41.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |40.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |40.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |40.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |41.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |41.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |41.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/libydb-services-fq.a |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |41.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/kesus/libydb-services-kesus.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |41.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/nbs/libydb-services-nbs.a |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ymq/libydb-services-ymq.a |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/libydb-services-metadata.a |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |41.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |41.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |40.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |40.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |40.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |41.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |41.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |41.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |41.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |41.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |41.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/view/libydb-services-view.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |41.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a 
|41.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |41.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/utf8_range/librestricted-google-utf8_range.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |41.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |41.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |41.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/replication/libydb-services-replication.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |41.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |41.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/tablet/libydb-services-tablet.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |41.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/local_discovery/grpc_service.cpp |41.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/discovery/grpc_service.cpp |41.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |41.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |41.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/next_token.cpp |41.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/lib/actors/consumers_advanced_monitoring_settings.cpp |41.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |41.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |41.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/shard_iterator.cpp |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp 
|41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/dynamic_config/grpc_service.cpp |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service_v1.cpp |41.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service_v2.cpp |41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/grpc_service.cpp |41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/private_grpc.cpp |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ydb_over_fq.cpp |41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/kesus/grpc_service.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/aws.cpp |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |41.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/nbs/grpc_service.cpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ymq/grpc_service.cpp |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/concurrent_hash/libcpp-containers-concurrent_hash.a |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ymq/ymq_proxy.cpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |41.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/service.cpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |41.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/maintenance/grpc_service.cpp |41.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |42.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/common.cpp |42.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/manager.cpp |42.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/monitoring/grpc_service.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |41.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/snapshot.cpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |41.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/initializer.cpp |42.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/object.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |42.0%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |42.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/fetcher.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |42.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/behaviour.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/access.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |42.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |42.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |42.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/statuses.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/set_queue_attributes.cpp |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/send_message.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/receive_message.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/purge_queue.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/list_queues.cpp |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/optimization/libservices-metadata-optimization.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/get_queue_attributes.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/test_shard/grpc_service.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/delete_message.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/error.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/protos/receipt/libsqs_topic-protos-receipt.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/change_message_visibility.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |42.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/topic.cpp |42.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/actor.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/create_queue.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/tablet/ydb_tablet.cpp |42.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |42.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/rate_limiter/grpc_service.cpp 
|42.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/sqs_topic_proxy.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |42.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/replication/grpc_service.cpp |42.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/utils.cpp |42.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/checker_access.cpp |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |42.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/secret.cpp |42.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/manager.cpp |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/fetcher.cpp |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/view/grpc_service.cpp |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/checker_secret.cpp |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/initializer.cpp |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/snapshot.cpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |41.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |41.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ymq/utils.cpp |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/layers/libessentials-core-layers.a |41.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |41.5%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 8.40 MB |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |41.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |41.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_exec_commit_mgr.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |42.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/sqs_topic/receipt.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/codecs.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/helpers.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/events.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/sqs_topic/consumer_attributes.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/gen_step.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter.cpp |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |43.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/nbs/cloud/blockstore/tools/testing/loadtest/lib/protos/libpy3loadtest-lib-protos.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/tools/testing/loadtest/lib/protos/libloadtest-lib-protos.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/libservices-sqs_topic-queue_url.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/holder/libsqs_topic-queue_url-holder.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |43.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libpy3utils-fetch-proto.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libutils-fetch-proto.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |43.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |43.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |43.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |43.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |41.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |41.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |41.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |41.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |42.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |42.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |42.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |42.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |42.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |42.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |42.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |42.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |42.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |42.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |42.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |42.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |42.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |42.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor.cpp |42.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |42.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |42.8%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp 
|42.9%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |42.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |43.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_txloglogic.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |43.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/flat_executor_vacuum_logic.h_serialized.cpp |43.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |43.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |43.0%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |43.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/dump_helpers/libyt-lib-dump_helpers.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/full_capture/libyt-lib-full_capture.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/temp_files/libyt-lib-temp_files.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_token_resolver/libyt-lib-yt_token_resolver.a |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/exceptions.cpp |43.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/lib/yt_file_download/libyt-lib-yt_file_download.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/global.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/read_table_impl.h_serialized.cpp |43.6%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_dbase_apply.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/mon.cpp |43.6%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_broker.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_backup.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/defs.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows_counters.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |43.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_table.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_dbase_scheme.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp_create.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_comp_gen.h_serialized.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_snapshot.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_seat.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_slice.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/flat_exec_commit.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_tx_env.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp_gen.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_borrowlogic.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor_compaction_logic.h_serialized.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_database.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_counters.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_outset.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor_recovery.h_serialized.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_page_label.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_fwd_misc.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_page_iface.h_serialized.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_part_loader.h_serialized.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.cc |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_charge_range.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_index_iter_create.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_mem_warm.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_recovery.cpp |44.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_dump.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions.cpp |44.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |44.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |44.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_part_group_iter_create.cpp |44.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_row_eggs.h_serialized.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_charge_create.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_overlay.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_scan_iface.h_serialized.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_sausage_meta.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_committed.cpp |44.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_sausagecache.cpp |44.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_store_hotdog.cpp |44.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |44.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table_btree_index.cpp |44.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_misc.cpp |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table_btree_index_histogram.cpp |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/rpc_client/libcpp-mapreduce-rpc_client.a |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/port_manager/libpy3library-python-port_manager.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/group_stat_aggregator.cpp |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_hugeblobctx.cpp |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_histogram_latency.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_handle_class.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libssh2/libcontrib-libs-libssh2.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_histograms.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_performance_params.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |45.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/objcopy_e9d954952def1b899e1fb63731.o |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/libpy3workload_topic.global.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |45.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cpuinfo.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/failure_injection.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |45.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |45.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |45.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/memory_tracker.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |45.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |45.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/describe.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/wait_events.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_proxy.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_utils.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/backoff.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/discovery_actor.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |45.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/util/cache.cpp |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/console.cpp |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash.cpp |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/format.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap.cpp |46.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/auth_factory.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/text.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/auth_actors.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/source_location.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/random.cpp |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/http_req.cpp |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/grpc_service.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/config/protos/libcore-config-protos.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id.cpp |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/http_service.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libpy3core-protos-nbs.global.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snowball/libcontrib-libs-snowball.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |46.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_ee633aebbed65e39e09fbf4271.o |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/exceptions_mapping.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_add.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |46.3%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_state_name_to_int.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/libproto_ast-gen-v1_proto_split_antlr4.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_group_path_struct.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor_sql.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/kafka.h_serialized.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_protocol.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_init.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_messages_int.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_records.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |46.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.global.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |46.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/test_runtime.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_messages.cpp |46.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_init_schema.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |46.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |46.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |46.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/version/libversion_definition.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |46.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |46.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/libydb-core-audit.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |46.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |46.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/libcore-base-generated.a |46.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |46.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/libydb-core-base.a |46.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_db.cpp |46.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |46.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/events.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_runtime.cpp |46.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/probes.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/schema.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |46.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/subscription/libthreading-future-subscription.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |46.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_dummy.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_config_get.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_html.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/tz/libcpp-type_info-tz.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |46.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ddisk/libcore-blobstorage-ddisk.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_impl.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |46.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |46.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |46.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |46.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/other/mon_blob_range_page.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_release.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_delete.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_describe.cpp |46.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |46.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_update.cpp |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |46.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/block_events.cpp |46.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |46.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |46.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_delete.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_create.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_acquire.cpp |46.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |46.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |46.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |46.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |46.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |46.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |46.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |46.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/testing.cpp |46.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_describe.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_attach.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_update.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_detach.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_guardian.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_destroy.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/regexp/libcore-backup-regexp.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |47.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_lookup.cpp |47.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/status.cpp |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/s3.cpp |47.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/read.cpp |47.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/query.cpp |47.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/proxy.cpp |47.1%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_connect.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_chunks.cpp |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_boot.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/metrics.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/agent.cpp |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/garbage.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/blocks.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgreader.cpp |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_sync.cpp |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/segment_manager.cpp |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_read_write.cpp |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_persistent_buffer.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/long_timer.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ddisk.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/comm.cpp |47.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/read_http_reply_protocol.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/request.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/name_service_client_protocol.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/backtrace.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/feature_flags_service.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/event_filter.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/fulltext.cpp |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/domain.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/appdata.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogneighbors.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/bridge.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_replica.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_publish.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ddisk/persistent_buffer_space_allocator.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_monitoring.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/wilson_tracing_control.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_replica.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_event_filter.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_proxy.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/event.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/tablet_killer.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/actor_activity_names.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_relevance.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_apply_config.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/blocks.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_gc.cpp 
|47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/coro_tx.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_load.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/assimilator.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/blob_depot.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_decommit.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/garbage_collection.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/mon_main.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_resolve.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_mon.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_uncertain.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_trash.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_scan.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_delete.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_upload.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/given_id_range.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_init_schema.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_write.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_load.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/space_monitor.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |47.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |48.1%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogformat.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/auth.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |47.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/libcore-client-server.a |47.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/libcore-cms-console.a |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |47.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/util/libcms-console-util.a |47.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/base/blobstorage.cpp |47.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |47.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/libydb-core-cms.a |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |47.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/libcore-config-validation.a |47.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/libcore-config-init.a |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/boot_type.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/libcore-control-lib.a |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/libydb-core-control.a |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/discovery/libydb-core-discovery.a |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/libversion.a |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/librun.a |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |47.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/boot_type.h_serialized.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_sectorrestorator.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |47.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_requestimpl.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/counters.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/html.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |47.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/services_assert.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/local_user_token.cpp |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/pool_stats_collector.cpp |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/memory_controller_iface.h_serialized.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/group_stat.cpp |47.8%| [AR] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/row_version.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/metadata/blobstorage_pdisk_metadata.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_syncstate.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb.cpp |47.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_vdiskid.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/path.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/vdisk_priorities.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tablet.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/table_index.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/kmeans_clusters.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/traceid.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tablet_status_checker.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/storage_pools.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tx_processing.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/subdomain.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/common_helper.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/helpers.cpp |48.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/appdata.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob.cpp |48.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |48.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/grpc_server.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/http.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_api_handler.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/init.h_serialized.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/erasure_checkers.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/config/init/dummy.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_update_config.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/downtime.cpp |48.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_delayed_cost_loop.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/base/libcontrol-lib-base.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_defs.h_serialized.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_drivemodel_db.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/main.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/counters_info/libydb-core-counters_info.a |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/run.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_flightcontrol.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_signal_event.cpp |48.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_params.cpp |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |48.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/factories.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config_parser.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_atomicblockcounter.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/printer/libformats-arrow-printer.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |48.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_60a4829fdc305e3a74a7ddcb41.o |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |48.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_76cd981cf66123b7633d25b898.o |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |48.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_22b5b8dd6ea05f4194f60e6181.o |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/events/liblibs-checkpointing-events.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/storage_helpers.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actor_helpers.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |48.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/db_key_resolver.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/compile_context.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp 
|48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_load_state.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_builder.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__create_tenant.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__configure.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_ping.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_get.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/result_writer.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/rate_limiter.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/table_bindings_from_bindings.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_console.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_service.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/gc.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/client/server/msgbus_server_cms.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_http_server.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/permutations.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/converter.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_types.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/process_columns.cpp |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_cache.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_config.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/api_adapters.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__init_scheme.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__load_state.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms.cpp |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cluster_info.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_first_boot_timestamp.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |49.1%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_settings.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/discovery/discovery.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/version/version.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/object_storage.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/immediate_control_board_actor.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/info_collector.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/http.cpp 
|49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/validators.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/logger.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/node_checkers.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init_noop.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/auth_config_validator.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/monitoring_config_validator.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__set_config.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/core_validators.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/registry.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_configdummy.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_provider.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/util/config_index.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_manager.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |49.2%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_handshake.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/config_helpers.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/util.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/logger.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/modifications_validator.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/ic_nodes_cache_service.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |49.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |49.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/fake_coordinator.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/cs_helper.cpp |49.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |49.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_pq.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_topic_data.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tenant_runtime.cpp |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tablet_helpers.cpp |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/test_client.cpp |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_operation.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_browse.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_wb_req.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/audit_log.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_query.cpp |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |49.5%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |48.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |48.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_pipe_req.cpp |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_request.cpp |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |49.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/compile_result.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/probes.cpp |49.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |49.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |49.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_filter.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_helpers_minikql.cpp |49.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |49.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_batch_builder.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/proxy_private.cpp |49.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |49.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/actors/libkqp-federated_query-actors.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/control_plane_storage_requester_actor.cpp |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/pending_fetcher.cpp |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/clusters_from_connections.cpp |49.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/nodes_health_check.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/error.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/special_keys.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/nodes_manager.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/actors/rate_limiter_resources.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/size_calcer.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_result_write.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_state_storage.cpp |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/validation_functions.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_checkpoint_storage.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_factory.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/cli_utils/melancholic_gopher.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/services.h_serialized.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/services.cpp |49.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/minikql/minikql_engine_host.cpp |49.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/grpc_library_helper.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/health/health.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/init/init.cpp |49.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/logs/log.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_audit.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/format_handler.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/tx_processor.cpp |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/utils/liblibs-control_plane_proxy-utils.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/local_leader_election.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/query_actor.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_login.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_startup.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.global.a |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cluster_state.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cms.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_inference/libydb-library-arrow_inference.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/helpers/libkqp-compile_service-helpers.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |49.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |50.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/result_set_format/libkqp-common-result_set_format.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/config.cpp |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_backup.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_calls.cpp |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/response_tasks.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/utils.cpp |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.a |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_logins.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_helper.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/control_plane_storage_counters.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/operation_helpers.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/probes.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/request_validators.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_config.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-sqs/liblibs-aws-sdk-cpp-aws-cpp-sdk-sqs.a |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_fq.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_export.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_import.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/grpc_services/rpc_list_operations.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/util.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_nbs.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher_service.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_ping.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_replication.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/table_settings.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_view.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/probes.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/actors_factory.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_test_shard.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/counters/counters.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_log.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/shard_impl.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/local_session.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/util.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_nbs_io.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/actors/kqp_federated_query_actors.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/shard_key_ranges.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |50.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |50.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |50.4%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |50.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |50.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |50.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |50.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |50.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/rbo/physical_convertion/libopt-rbo-physical_convertion.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |50.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |50.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/rbo/rules/libopt-rbo-rules.a |50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |50.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_mon.cpp |50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/fs_path_validation.cpp |50.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |50.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/libydb-core-load_test.a |50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/local_proxy/local_pq_client/libcore-local_proxy-local_pq_client.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_publisher_service_actor.cpp |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/libydb-core-mind.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/libydb-core-mon.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |49.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/liblibs-storage-partition_direct.a |49.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/storage_transport/liblibs-storage-storage_transport.a |49.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |49.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |49.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/local_table_client.cpp |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |49.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/common/utils.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp 
|49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |49.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/schema.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_transform.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/counters/proxy_counters.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_translate.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_runner.cpp |49.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/node_service/kqp_node_state.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/sdk_table_client.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ydb_local_connection.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/sdk_session.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/ydb.h_serialized.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ydb_sdk_connection.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events_stats.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_locks_helper.cpp |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/backends.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |50.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_query_text_cache_service.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_stat.cpp |50.1%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_write_session.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_read_session.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |50.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/memory.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_client_helpers.cpp |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_client.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/bootstrap/libcloud-blockstore-bootstrap.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_log.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/libs/common/libblockstore-libs-common.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |50.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/libcommands-interactive-ai.a |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/nbs2_load_actor.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/config/libcloud-blockstore-config.a |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/load_actor_adapter.cpp |50.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct_actor.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/api/liblibs-storage-api.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/fast_path_service.cpp |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/direct_block_group_in_mem.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/libs/diagnostics/libblockstore-libs-diagnostics.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/models/libinteractive-ai-models.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/libs/kikimr/libblockstore-libs-kikimr.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/session/libcommands-interactive-session.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/tools/testing/loadtest/lib/libtesting-loadtest-lib.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/tools/libinteractive-ai-tools.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_mlp.cpp |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |50.5%| [CP] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/yql/essentials/minikql/computation/mkql_computation_node_codegen.h |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/local/libcomplete-analysis-local.a |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/storage/core/libs/common/libcore-libs-common.a |50.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/contrib/vhost/libvhost-server.a |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/storage/core/libs/diagnostics/libcore-libs-diagnostics.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/sqs_json/libsqs_json.a |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/message_id_deduplicator.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/consumer_offset_tracker.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/libs/service/libblockstore-libs-service.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/public/api/protos/libpublic-api-protos.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/libs/vhost/libblockstore-libs-vhost.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/common/actor.cpp |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/storage/core/protos/libstorage-core-protos.a |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_metrics.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_statics.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_log.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_impl.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_domains.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/fill.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/drain.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/domain_info.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/balancer.cpp |50.7%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/sourceid_info.h_serialized.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/boot_queue.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/common/tracing_support.cpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_config.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__register_node.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/common_app.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__migrate_state.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/autopartitioning_manager.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__load_state.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/common/libcommands-interactive-common.a |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/dynamic_nameserver.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/libsqs_workload.a |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/scrub.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/counters.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/shred.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/util.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/mind/bscontroller/migrate.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/grouper.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/net_classifier.h_serialized.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/get_group.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ddisk.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/register_node.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/node_report.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_ddisk.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/bsc.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/labels_maintainer.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/bridge.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/lease_holder.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/memory_controller/memory_controller.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/local.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |51.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/bsc_audit.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/aggregated_result.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/archive.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ycsb/info_collector.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/common.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcc/liblibrary-workload-tpcc.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ddisk_write.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__register_node.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_column_statistics_utils.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pq.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_balancer.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/keyvalue_write.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/metering_sink.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_proxy/local_pq_client/local_federated_topic_client.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/storage_transport/ic_storage_transport.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/fix_transaction_states.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/direct_block_group.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__set_down.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/reassign_actor.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__mlp_balancing.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/monitor.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/node_info.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__metrics.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/profiler.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/audit/audit.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/events/internal.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_group_info.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_buffer_lookup_actor.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_pool.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tablet_info.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/kqp.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/log_backend/log_backend.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__status.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/stats.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/group_write.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_full_text_source.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/monitoring.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/mon.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__start_tablet.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |51.7%| PREPARE $(GDB) - 25.18 MB |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_write.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_read.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/events/events.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/vdisk_write.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/service_actor.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/yql_single_query.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/log_backend/log_backend_build.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |51.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_host.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/crossref.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/out/libcore-protos-out.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/libydb-core-public_http.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/libydb-core-quoter.a |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_arrow_memory_pool.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/tree/dynamic.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager_graph_cmp.cpp 
|51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_actor.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/config_examples.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_util.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/heartbeat.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/percentiles.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_task.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/audit_denylist.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/key.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/tree/snapshot.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_fulltext_analyze.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/interconnect_load.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_client_factory.cpp |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/libydb-core-security.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_effects.cpp |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/slot_indexes_pool.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon_alloc/memory_info.cpp |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/sasl/libcore-security-sasl.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute.cpp |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/microseconds_sliding_window.cpp |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |51.5%| [AR] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/libydb-core-tablet.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/transfer/libydb-core-transfer.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/quota/account_read_quoter.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/quota/quoter_base.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage__serialization.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_message_enricher.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_common.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/percentile.h_serialized.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_app.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_reader.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/request.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_pgwire/sql_parser.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_meta.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_join_helpers.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_factory.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_table.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/runtime/kqp_sequencer_factory.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor_settings.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/public_http/http_service.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/public_http/http_req.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_program_builder.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_changer.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_purger.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_writer.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/writer.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/actors/pool_handlers_actors.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/counters/libpersqueue-public-counters.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/quota/read_quoter.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/libcomplete-name-cache.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/describer/libpersqueue-public-describer.a |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/utils.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/config.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter.cpp |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/write_meta/libpersqueue-public-write_meta.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/local/libname-cache-local.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_check.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/login_page.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ticket_parser.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/static/libobject-simple-static.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/analyze_actor.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/retro_tracing_impl/libydb-core-retro_tracing_impl.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/cached/libobject-simple-cached.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/sasl/plain_ldap_auth_proxy_actor.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/scan.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/db_counters.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/processor.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/tablets/tablets.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/groups.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_resolver.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/resource_broker.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/impatient/libname-service-impatient.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_delete.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/binding/libname-service-binding.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_metrics.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/column/libname-service-column.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/token_manager/libcore-security-token_manager.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_reset.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/vslots.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/streaming_queries/streaming_queries.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/node_whiteboard.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/protos/libbackup-import-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_sys.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/pdisks.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/bootstrapper.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/service/sysview_service.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/service/ext_counters.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_init.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_request.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_delivery_problem.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/group_members.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/users.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/nodes/nodes.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service_impl.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/groups.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/sys_view/auth/permissions.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/owners.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/sasl/scram_auth_actor.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/write_id.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/database/database.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/quota_tracker.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_response.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/sasl/base_auth_actors.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_metrics.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/sasl/plain_auth_actor.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc_input.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/column_table.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/transfer_writer.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc_output.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/row_table.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/scheme.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_initialize.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_tablet.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/state_server_interface.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_state.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_write.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/inflight_limiter.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/grpc_request_context_wrapper.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_shard_context.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/debug_info.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router.cpp |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/probes.cpp |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/pq_database.cpp |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_auth_processor.cpp |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils.cpp |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/select_builder.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/column_statistic_eval.cpp |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/db_counters.cpp |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/labeled_db_counters.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/labeled_counters_merger.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/simple_tablet.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/private/labeled_db_counters.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters.cpp |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_app.cpp |52.4%| [AR] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache.cpp |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_client_cache.cpp |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_protobuf.cpp |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_server.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_interval.cpp |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/processor/schema.cpp |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/private/aggregated_counters.cpp |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/login_shared_func.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_tracing_signals.cpp |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/sasl/hasher.cpp |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/schema.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/http_request.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/sasl/static_credentials_provider.cpp |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |52.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_d3749b17b0bc2057695c3e736a.o |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |52.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_streaming.py::flake8 [GOOD] >> test_udfs.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.global.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/filter.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__init.cpp |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/execute_queue.cpp |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/libydb-core-tx.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/write_actor.cpp |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/program/libcore-tx-program.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |50.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |50.8%| [TS] ydb/tests/fq/streaming/flake8 >> test_watermarks.py::flake8 [GOOD] |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/settings.cpp |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/chunk_meta.cpp |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/filter_kmeans.cpp |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |50.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/lwtrace_probes.cpp |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service.cpp |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/time_counters.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/locks/locks.cpp |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/stage_features.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/events.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/error/error.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/abstract.cpp |51.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/locks_db.cpp |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/process.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/replica.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/populator.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |51.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_6bdc69403f0fa7c03edbf19c28.o |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/attributes/libcore-ymq-attributes.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/load_test.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/helpers.cpp |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/cache.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/events.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/backup.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/fake_storage_config.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/worker.h_serialized.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/topic_message.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/session_info.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/two_part_description.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/subscriber.h_serialized.cpp |51.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |51.3%| 
[CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_scheme_builders.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_consumer_remover.cpp |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/manager.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/sharding.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_slider.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/random.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/fetcher.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_tx_infly.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/scheme_cache.h_serialized.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_system_names.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_types.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.h_serialized.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/run_query.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/queue_attributes.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/events_writer.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/address/libactors-interconnect-address.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/action.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/xml.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/types.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/unboxed_reader.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/async/liblibrary-actors-async.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/hash.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/http.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/counters.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/common.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/queue_id.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/query_id.h_serialized.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/probes.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/helpers.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/xml_builder.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/cloud_enums.h_serialized.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/parser.rl6.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/acl.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction__create.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction__progress.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_login_helper.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/retro_tracing/liblibrary-actors-retro_tracing.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_truncate_table.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |51.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_helpers.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_helpers.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/queues/std/queries.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_database_result.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/queues/std/schema.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/replication/controller/stream_creator.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/worker.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/tenant_resolver.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_table.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_base.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/sys_params.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_transfer.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_with_stream.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/logging.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_remover.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/controller.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/s3_writer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/service.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/lag_provider.cpp 
|52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/event_util.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/replication.h_serialized.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/replication.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/private_events.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/upload_stats.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/resource_id_resolver.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/queues/fifo/schema.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/libactors-interconnect-rdma.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/truncate_unit.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/poller/libactors-interconnect-poller.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy_schemereq.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/cq_actor/libinterconnect-rdma-cq_actor.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/counters.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/message_seqno.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/ids.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/group.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/restore_unit.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_locks.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/allocation.h_serialized.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/allocation.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/operation.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/type_serialization.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/key_validator.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_scan.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_common.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/probes.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/scan_common.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp 
|53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/range_ops.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_conflicts.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_iface.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_helpers.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit_kind.h_serialized.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_download.cpp |53.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_table.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_s3_upload.h_serialized.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__write.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_cdc_stream_common.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |53.6%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_effects.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_delete_rows.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_failpoints.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__init.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/fulltext_dict.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/backup_unit.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_traits.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_hooks.cpp |53.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execution_unit.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/import_s3.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/follower_edge.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/backup_restore_traits.h_serialized.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/defs.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/change_exchange.h_serialized.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_body_serializer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_cdc_serializer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__tx_abort.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/fake_storage.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/background_controller.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scheme.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blob.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/wrappers/fs_storage.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/abstract.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |54.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_debug.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/wrappers/s3_storage_config.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/sasl/liblibrary-login-sasl.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_query.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_logstore.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_export.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_dummy.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_import.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/signals/libydb-library-signals.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_object_storage.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_operation.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/hashes_checker/liblibrary-login-hashes_checker.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/fs_storage_config.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_storage.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scripting.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |54.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/retry_policy.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/plan2svg/libydb-library-plan2svg.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/range_treap/libydb-library-range_treap.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/defs.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/query_actor/query_actor.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/workload/libpy3stress-kv-workload.global.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/service/service.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/signals/owner.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/usage/config.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/events.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/table_creator/table_creator.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/usage/service.cpp 
|54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/console_dumper.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |54.7%| [TS] {RESULT} ydb/tests/fq/streaming/flake8 |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.pb.cc |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/dq/state/dq_state_load_plan.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/libdq-comp_nodes-hash_join_utils.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_io_discovery.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_dq_integration.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_ytflow_optimize.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_provider_impl.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_ytflow_integration.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasource.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_provider.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_execution.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_physical_optimize.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_load_meta.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_helpers.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_type_ann.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasource_type_ann.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/library/yql/providers/pq/provider/yql_pq_io_discovery.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_mkql_compiler.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_logical_opt.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_settings.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/private.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/counters.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |54.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/objcopy_691607a9cbabb8d8c0161d1a6d.o |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/plain_status/libimpl-internal-plain_status.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/libpy3workload_kv.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/value_helpers/libimpl-internal-value_helpers.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |54.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/client.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/states.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/agent.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/slide_limiter/usage/abstract.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/slide_limiter/usage/events.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/object_counter.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/histogram.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |54.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/time/libsrc-library-time.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.pb.cc |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/transfer |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |54.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_truncate_table_simple/ydb-core-tx-schemeshard-ut_truncate_table_simple |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/node_broker |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/show_create_view |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/show_create_table |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tracing/libydb-core-tracing.a |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_topic_key_parser.cpp |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/streaming |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/backup_stress |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/http/ut/ydb-library-actors-http-ut |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/state/ut/ydb-library-yql-dq-state-ut |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |55.0%| [AR] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_rd_read_actor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_write_actor.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor_base.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_truncate_table_simple/ut_truncate_table_simple.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/libaudit-v1-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tracing/tablet_info.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/libclient-nc_private-audit.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/ptr_ut.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/libapi-client-nc_private.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/actors/http/http_ut.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/libnc_private-common-v1.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/lib/basic_test.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/dq/state/ut/dq_state_load_plan_ut.cpp |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/large/objcopy_632c8f121033cba272f8b156b6.o |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_f152d89e868e3e70c582478d88.o |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.global.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/join_perf/bin/main.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a14abb13ecebd457a15fc48470.o |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/clients/composite/libgateway-clients-composite.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/workload/libpy3stress-topic_kafka-workload.global.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/libnc_private-iam-v1.a |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_7f9e816a97aaeee837ac316091.o |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/libpy3transfer.global.a |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/objcopy_b632f28ee823f938d14c0e85f9.o |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |55.3%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/configs/dump/config-meta.json |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_truncate/ydb-core-tx-datashard-ut_truncate |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_meta_extractor.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/async_io/probes.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/microseconds_sliding_window_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/libpy3node_broker.global.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/stream_ru_calculator_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/workload/libpy3stress-node_broker-workload.global.a |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/objcopy_2a9fba044b5f98d2ff5f5c7f44.o |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/time_grid_ut.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/objcopy_9ccdc4f01b578a43bc35d4d519.o |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/workload/libpy3show_create-view-workload.global.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/provider/yql_s3_listing_strategy_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_e25036fa51e72ace049084c308.o |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/libpy3show_create_view.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/libpy3show_create_table.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/objcopy_970514ee5aa7605a49b54b8feb.o |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/trace.cpp |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/objcopy_33061e6ec0580baa7f93f1ce23.o |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/objcopy_06a563d04a0e897145ef1061d2.o |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/trace_collection.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/http.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/combiner_perf/libkqp-tools-combiner_perf.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/quota_tracker_ut.cpp |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_8e0f2cf91b35e6051ad82535a4.o |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d2d4e3343da9b011ee6a983244.o |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |55.3%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/lib/objectwithstate.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/workload/libpy3stress-viewer-workload.global.a |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d0e1cde98d2ab34e72d18aae9c.o |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/ctas |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e55498abceca534315a6428452.o |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e508a8abac843a0a0f92fc62eb.o |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/workload/libpy3stress-streaming-workload.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/libpy3streaming.global.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/objcopy_1ac33161075322a9a0efb211c2.o |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/libpy3backup_stress.global.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/objcopy_4f24d349611ecfcf19e3001adb.o |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/workload/libpy3stress-backup-workload.global.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/http/ut/tls_client_connection.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/utils/libintegration-topic-utils.a |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_cd57da3671b96739ee73293fb1.o |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_3bb523a1011c0a7019f2684a90.o |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/scheme_board/replica_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/workload/libpy3stress-s3_backups-workload.global.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_e8c94c485e81b4b2899f52f594.o |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_953328e5c3275a286b65dc3b1d.o |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_1de592266ca9bc1d10b20d8e9a.o |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_truncate/datashard_ut_truncate.cpp |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_cf5836766ac30ca7ea957ce368.o |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_c4b248e24a6215fa53b9e5552d.o |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/common/metadata_ut.cpp |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/blob_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/pgproxy/pg_proxy_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/database_resolver_mock.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/defaults.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/meta/meta_versions.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/meta/meta.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/validators_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/libpy3workload_topic_kafka.global.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/type_codecs_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/operations/write_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/operations/delete_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |55.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_899316667b8914fe8ec3af85d9.o |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/s3_writer_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/workload/libpy3show_create-table-workload.global.a |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_daba02a22b66dd174e40603586.o |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/libpy3workload_testshard.global.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/common/encryption_ut.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_bd84885c5c24478d181ba9e493.o |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/objcopy_8d2ea3c78a255bb4c87c2fc54a.o |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/control.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_batch_operations.cpp |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a457e57e9ccca716aa1224bf90.o |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |55.5%| 
[PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a38b1580810a6e4b419da99dcf.o |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/ut_helpers.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_de67ee476035f2cc7c8d34c996.o |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/large/objcopy_a3b27567b6c6c0a98826db6a0d.o |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_927a1f7611cf94fb1cd21ef8cf.o |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |55.5%| [BI] {tool} $(B)/library/cpp/build_info/buildinfo_data.h |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_1815f02732d96389c328f04d90.o |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_f0d8fb718a757998dc9403df32.o |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_278b1a63a14648a80c4b930adb.o |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_df04396057094f2483296b9dbe.o |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/mvp/meta/mvp.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |55.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_0035b673555f394234ae284e25.o |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_3310cbcd39c3373557308c8e76.o |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/libpy3ctas.global.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/objcopy_3cb499a0fcc9aa014af2855233.o |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/clients/external/libgateway-clients-external.a |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_4ffdb694eb351ca96de1930bf2.o |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/workload/libpy3stress-ctas-workload.global.a |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_e91d43b449a687b2b36f1f5526.o |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/metadata/ut/functions_metadata_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/ut/utils/libcomp_nodes-ut-utils.a |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope_ut.cpp |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_e66920085df69f6f7e41547063.o |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_types.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/join_perf/libkqp-tools-join_perf.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_tx_info.h_serialized.cpp |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_script_executions.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/ydbd/main.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_row_builder.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_yql.h_serialized.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_lwtrace_probes.cpp |55.6%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/context.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_user_request_context.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/cracked_page.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_yandex.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_final.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_client.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_cleanup_page.cpp |55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_yql.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_handler.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_manager.cpp |55.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_whoami.cpp |55.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_e68ca1a2fa9943132c020ae028.o |55.7%| [CC] {tool} $(S)/library/cpp/svnversion/svn_interface.c |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_handler.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_impersonate_start_page_nebius.cpp |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page.cpp |55.6%| [CC] {tool} $(B)/library/cpp/build_info/build_info.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_impersonate_stop_page_nebius.cpp |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7f02665786b7523f76c02ad1dd.o |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/workload/libpy3stress-cdc-workload.global.a |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7c0098f27edc25092453a8033c.o |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_b9fcf9641e3e569e88014f85ff.o |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hulloptlsn.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/config/bsconfig_ut.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_yandex.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_nebius.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/provider/ut/pushdown/pushdown_ut.cpp |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/libydb-mvp-core.a |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_settings.cpp |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |55.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/mvp/oidc_proxy/oidc_session_create_nebius.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |55.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/olap_workload |55.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_2983697b65fe3fbc437e0acf1f.o |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |55.6%| [CC] {tool} $(S)/library/cpp/svnversion/svnversion.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |55.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_aac602e1ee9075b2856f1975ca.o |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |55.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_771af70678639da3d434ea1416.o |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/helpers/ut/kqp_compile_cache_helpers_ut.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |55.7%| [CC] {tool} $(B)/library/cpp/build_info/sandbox.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv_volume/tests/ydb-tests-stress-kv_volume-tests |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |55.7%| [PD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/workload/libpy3stress-log-workload.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/cdc |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydbd/export/libapps-ydbd-export.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/partition_key_range/partition_key_range_sequence_ut.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/core_ydb.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/statistics_internal/libstatistics_internal_udf.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/core_ydbc.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_ut.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |55.8%| [CC] {tool} $(S)/library/cpp/build_info/build_info_static.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_83efacabe56767ae4f106a6d27.o |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_queues.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_users.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_287a0728f8b1ad204ac0396eb2.o |55.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/849c58233edc33539cbeb93a31_raw.auxcpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_c96c333b4f7fc5cb2b98b27907.o |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/large/objcopy_65ca1c9a2830c14c52b6f356c1.o |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/change_visibility.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_b8d63b589074145793d63c27a3.o |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |55.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_purger_ut.cpp |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_e6184a39b8332c221c5cda3c2f.o |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_609c2613d8f9c513602350c6a8.o |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_queue.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/create_user.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/create_queue.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/auth_factory.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_group/main.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_message.cpp |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_ffc5f76f7501b8251738448541.o |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/workload/libpy3stress-mixedpy-workload.global.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_utils.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_user.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/error.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/libpy3workload_log.global.a 
|55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/snap_vec_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/objcopy_b61dd8e76e6166fc1abc85e60e.o |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/create/objcopy_6f878f770dd4c6f7c5bf747754.o |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/bufferwithgaps_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/batched_vec_ut.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/common/libpy3olap-column_compression-common.global.a |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/create/objcopy_1695018355cc65fd2ff1b14966.o |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/create/objcopy_5a5ac431f81726375a5eac4604.o |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/external_table_description.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_benches_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/column_families.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_permissions.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/appdata.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_profiles.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_settings.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_description.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/topic_description.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/parser.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_mem_profiler.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/actor.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/tag_queue.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/merger.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/reducer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/proxy_service.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/cache_policy.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/filter.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/node_tracker.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/monitoring.cpp |56.0%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/scheme_board/pile_promotion/objcopy_e99c9b04005e36c324dfb9fd3b.o |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/purge_queue.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/libpy3pile_promotion_workload.global.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/purge.cpp |56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/viewer |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_security_printer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queue_leader.cpp |56.0%| [PD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_startup_options.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |56.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/send_message.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/retention.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |56.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_367e2bc5d83faa0907a06d2976.o |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/local_rate_limiter_allocator.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/schema.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queue_schema.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/receive_message.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/count_queues.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/service.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/metering.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/untag_queue.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/executor.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/common/liblibrary-testlib-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_5acd2383ed2cd599cfd64f7c8a.o |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_59eb97971e5f83d3296e6c33b5.o |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_60e08504076128d310212c6460.o |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/raw_socket/ut/buffered_writer_ut.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_08f7acdb6eb761b28bf6990862.o |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/s3_backups |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/actors/ut_service/fast/core-kqp-federated_query-actors-ut_service-fast |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/result_set_format/result_set_format |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv_volume/tests/objcopy_c4baafa2b7c7755afe1c4d6adf.o |56.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv_volume/tests/objcopy_5ebaf4d4fc4ad09c038ead7bc6.o |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |56.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv_volume/tests/objcopy_cd731e80de9ecbc677a1c58109.o |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |56.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/objcopy_7d7339f4588397fc771e31030c.o |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/libpy3cdc.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/libpy3workload_mixed.global.a |55.7%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_simple_db_flat.cpp |55.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_c0a0299090f532c29f2ee6dc87.o |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_helpers.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_data.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collect_operation.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_stored_state_data.cpp |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_7479409fb33baf855b74c3e835.o |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_acf74a4313fbcafa6df239e3ec.o |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_df4191b43fee1a7d77acb3207f.o |55.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/infly.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/replication_description.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/external_data_source_description.cpp |55.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/kesus_description.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/auth_mocks.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/log.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/message_delay_stats.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.grpc.pb.cc |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/libpy3olap_workload.global.a |55.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/objcopy_9de271b22d7bcc64ef77cc3cde.o |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/type/libpy3olap_workload-workload-type.global.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/sha256.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/user_settings_names.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/lib/libpy3tests-datashard-lib.global.a |55.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mapper.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_large/ut_btree_index_large.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/registry/libcpp-dwarf_backtrace-registry.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/http-parser/libcontrib-deprecated-http-parser.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_generator.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/replication_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_logical.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_linear.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.pb.cc |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv_volume/workload_keyvalue_volume |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hasitems.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flatmap.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromyson.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/abstract.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_exists.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterator.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_measure_arg.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/hive/timestamp_map.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/metadata/impl/libfile-metadata-impl.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/interface/libfmr-yt_job_service-interface.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multihopping.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/proto_helpers/libtable_data_service-client-proto_helpers.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_launcher/libyt-fmr-job_launcher.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_preparer/impl/libfmr-job_preparer-impl.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_4e3ea6c3c5a0438f05942dbc81.o |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_75e82e9b2ff2024ae902b7d5e4.o |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_28c396580e7e319c4a82e15fc9.o |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/workload/libpy3stress-result_set_format-workload.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/workload/type/libpy3result_set_format-workload-type.global.a |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/objcopy_ab0068aecc73dfe57505e1cc57.o |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_2650fc91e1b7439f48496ea51e.o |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cluster/static/libname-cluster-static.a |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/cloud.{pb.h ... grpc.pb.h} |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/check/libv1-complete-check.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token_service.{pb.h ... 
grpc.pb.h} |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/yql/libcomplete-analysis-yql.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/quota.{pb.h ... grpc.pb.h} |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |56.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/actors/ut_service/fast/kqp_federated_query_actors_ut.cpp |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_6dc907e08e2d6bd66a6c998081.o |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/check/libv1-format-check.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |56.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/complete/name/service/cluster/libname-service-cluster.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/interface/libcoordinator-yt_coordinator_service-interface.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_4d329553dae503930cad7a3eec.o |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/file/libcoordinator-yt_coordinator_service-file.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/upload/interface/libfile-upload-interface.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_dac3ec236f3cba753ea226bb96.o |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/impl/libcoordinator-yt_coordinator_service-impl.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_2900a0c4957bb4f1bc1729508c.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_c386e2211742a44d16094d73d0.o |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_51c4be7ece033a38cc2a2dfb64.o |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/metadata/interface/libfile-metadata-interface.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_ab542bc0d25a481768cec1ad31.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_f022524c80d73c5ff5d59211dd.o |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/libpy3result_set_format.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/upload/impl/libfile-upload-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/impl/libfmr-gc_service-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/interface/libfmr-gc_service-interface.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/libpy3kafka_streams_test.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_preparer/interface/libfmr-job_preparer-interface.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/objcopy_e0d6c3883613601e455029f31f.o |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |56.3%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/workload/objcopy_43f03d8ce0f7a500b6680c8688.o |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/comparator/libfmr-utils-comparator.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/interface/libtable_data_service-discovery-interface.a |56.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/file/libtable_data_service-discovery-file.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/libpy3s3_backups.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/process/libyt-fmr-process.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/interface/libtable_data_service-local-interface.a |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/objcopy_4508aef343f36758ea760320db.o |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/impl/libtable_data_service-local-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/interface/libfmr-worker-interface.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/impl/libtable_data_service-client-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/yson_block_iterator/impl/libutils-yson_block_iterator-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/impl/libfmr-yt_job_service-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/file/libfmr-yt_job_service-file.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_narrow_map.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_now.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_next_value.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/chaos_lease.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload/objcopy_e818dcbb59134fffc2af5b1d20.o |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_ut.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |56.5%| 
[PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/task.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/cursor.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |56.5%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/workload/ydb_cli |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |56.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/objcopy_fecdbbd3b168ee83928a6be126.o |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_queue.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/objcopy_108c71299bb0fd7f230af129b0.o |56.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... 
grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/fmr.pb.{h, cc} |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_parser/enum_parser |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.{pb.h ... grpc.pb.h} |56.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_skiptake.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... 
grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/fetch_config.pb.{h, cc} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_just.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_getelem.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_if.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_logical.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_contains.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mod.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lookup.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_map_join.cpp |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |56.7%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_group.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompressor/rescompressor |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_partitions.cpp |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompiler/rescompiler |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/1cd54909ea94c2b06e13875a05_raw.auxcpp |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_layers_integration.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/56f374d81ad14f072a4df966aa_raw.auxcpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/4d5e90a82c3d6e8c08a37d8154_raw.auxcpp |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |56.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_49e50404ca7d8e93240b3617b2.o |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mutdict.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_session.cpp |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/row_table/ydb-core-transfer-ut-row_table |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token.{pb.h ... grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/9b8f667efdecb908b3197117f5_raw.auxcpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_prepend.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_keyvalue_v1.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_random.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_scheme_v1.{pb.h ... grpc.pb.h} |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reduce.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_replicate.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_keyvalue_v2.{pb.h ... 
grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_rh_hash.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_seq.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/arrow_builders.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_skip.cpp |56.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/workload/objcopy_d6a1b41fc15f14862f8ec129ed.o |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_round.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/service/manager.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_take.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_sort.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... 
grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chain_map.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_weakmember.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tobytes.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_visitall.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_switch.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chopper.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_withcontext.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/606f468a87470aa2234a8d54ad_raw.auxcpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_factory.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_collect.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_zip.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/tracked_memory_chunk_provider.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/cloud_user.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_top_sort.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_addmember.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... 
grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_copy_table.cpp |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_scripting_v1.{pb.h ... grpc.pb.h} |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/5c07c2b0e4aa6f06e69ef1a092_raw.auxcpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/d2d04b008d9078a63d7bcd16b2_raw.auxcpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/59d94d9e926a2418ec46332230_raw.auxcpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/06d9ba434360fff64a207e5dc3_raw.auxcpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/5475352289bdcf6a74b369d2be_raw.auxcpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/console_service.{pb.h ... grpc.pb.h} |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/metadata.{pb.h ... grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/5f923d90c670ba841c0f01451b_raw.auxcpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/755d4a4522918daf6ec35f97d0_raw.auxcpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/7b6496bde9b8ab15e3457f1836_raw.auxcpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/939f714fb0dbfc194d76e13956_raw.auxcpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ac2aacb5d77eaf0084a9093f1f_raw.auxcpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/9f6e258d98e30b0068f6502ae5_raw.auxcpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b2102832031b3ac9b15541f622_raw.auxcpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_todict.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fbc28a71dca227c86ebf0c8b7b_raw.auxcpp |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/d4930a6fcb72080497240b34b7_raw.auxcpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_a926d3332cb769ac3e6c9e6e37.o |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_2f7ac0f750374152d13c6bfbcf.o |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_854d6cc7a0cc5cdd793cfc1e6d.o |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_8491a772a9425d10f304e6f0e9.o |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_fd8d9957a06c9923c501e36fd9.o |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_c114cbf6b820d92320c1e2c912.o |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_589315062f5401a368910248f0.o |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_61613f0bd98876f149d8574891.o |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |56.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |56.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_bc86b379db0e87d023f98a592e.o |56.7%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/1cd54909ea94c2b06e13875a05_raw.auxcpp |56.7%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload/ydb_cli |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/8dfa9c11a0ed07075156cb0212_raw.auxcpp |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/742f1d972307bb9d01a727deea_raw.auxcpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/ff6190fab83f2fbb3c188b72b9_raw.auxcpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |56.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp |56.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/ydb-core-base-ut |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/incompatibility_rules_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_safe_circular_buffer.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/02640fb91e2f8257352e812ca8_raw.auxcpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/d1c8cd8138e309eb098587d29e_raw.auxcpp |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ddisk/ut/ydb-core-blobstorage-ddisk-ut |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/streaming/ydb-tests-compatibility-streaming |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/unittests.pb.{h, cc} |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler_ipc.cpp |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/common.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/tools/decrypt/decrypt |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_log_impl.cpp |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_check_args.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/workload/objcopy_0e9ac60eb7f731d367df5e10f1.o |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/ut/state_storage_follower_ids_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/prioritized_invoker.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/kqp_benches.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/action_type.{pb.h ... 
grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/http.{pb.h ... grpc.pb.h} |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/service_account.{pb.h ... grpc.pb.h} |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token_service.{pb.h ... grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/retro_tracing.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/access_service.{pb.h ... grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_4488a798bff4dfb5b10f31aa13.o |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/service_account_service.{pb.h ... grpc.pb.h} |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token_service_subject.{pb.h ... 
grpc.pb.h} |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/column_table/ydb-core-transfer-ut-column_table |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/resource.{pb.h ... grpc.pb.h} |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/yandex_passport_cookie.{pb.h ... grpc.pb.h} |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_state_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_filter.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_combine.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb_ut.cpp |56.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/streaming/objcopy_7658f3232c7b17d24a0a184809.o |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/kmeans_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/memory_stats_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/path_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/table_index_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/resource_broker_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/tools/decrypt/main.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/fulltext_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/libpy3tests-library-compatibility.global.a |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |56.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/streaming/objcopy_e6cee7a0d001013375fe4474ef.o |56.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/streaming/objcopy_15b0d0854ad50b151b07652021.o |56.6%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload/tsserver |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_config/audit_config.cpp |56.7%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/ff6190fab83f2fbb3c188b72b9_raw.auxcpp |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction__intpy3___pb2_grpc.py.p5ju.yapyc3 |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.pb.cc |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms__intpy3___pb2_grpc.py.p5ju.yapyc3 |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_guardian_impl_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config__intpy3___pb2.py{ ... i} |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense1.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |56.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mul.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_element.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/cancellation.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_expand_map.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |56.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dynamic_variant.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.grpc.pb.cc |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_factory.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |56.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2_grpc.py.p5ju.yapyc3 |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |56.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/vector/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/serializer/native.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.{pb.h ... 
grpc.pb.h} |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_frombytes.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |56.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/helpers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_guess.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |56.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |56.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |56.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |56.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_64cecb639c5f85fbf868097a08.o |56.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/abstract.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_join_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/test_helper/program_constructor.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join_imp.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |56.2%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.{gen.h ... 
defs.inl.h} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/dqs.pb.{h, cc} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/aggr_keys.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/codecs_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl_ut.cpp |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_combine_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/table_creator/table_creator_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/main.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.pb.cc |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |56.3%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_if.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/federated_queries/ydb-tests-compatibility-federated_queries |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_27c0687ceeb7ce4ff5e4cea90a.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_7eab954373d77ffb1fab95ca0d.o |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_d68e1e5b762e412afe6a534487.o |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_b866963286293af0b6f2139fed.o |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_f9b0feecd0e36f08cbf5c53562.o |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/libclient-yc_private-quota.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/health_check/health_check_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterable.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/security/simple/libmvp-security-simple.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/purecalc_filter.cpp |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/libclient-yc_private-access.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_5233daad98e22a16d60b4b7b86.o |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/bin/main.cpp |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_38c6001204b7ada03b8b3e421d.o |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_1c931ae3496b883d009f58ef8e.o |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/libmvp-core-protos.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.grpc.pb.cc |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/client/libpy3tools-solomon_emulator-client.global.a |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/retry/py3/libpy3python-retry-py3.global.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_9aa3ff9cf14537a8e01acbe7ce.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_65ac58c27d43a55d0ea4eda626.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_8e487e54cce16b8e086d7fe332.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_defffa7b24e868ee4421718d41.o |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/cms/cms_ut.cpp |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_24f8afc6706a031414d4ae3804.o |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_315e7b92dbfd85f2e8c34a8312.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_5d54b594376b9bf9ac253dba8e.o |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/forced_compaction.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lazy_list.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp 
|56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |56.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.h |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.pb.cc |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/py3cc |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/tool |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/storage/core/protos/error.pb.{h, cc} |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/slow/py3cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/public/api/protos/headers.pb.{h, cc} |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/federated_queries/objcopy_600a00846e81e804b6821a1e7d.o |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/federated_queries/objcopy_cdb71a03369b35054573fbb9d8.o |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/federated_queries/objcopy_b44faf459bbd6fc49621c663b8.o |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/libyc_private-ydb-v1.a |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/storage/core/protos/trace.pb.{h, cc} |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_ddisk/ydb-core-blobstorage-ut_blobstorage-ut_ddisk |56.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/config/storage.pb.{h, cc} |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/solomon_helpers/liblibrary-testlib-solomon_helpers.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_tli.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_labeled.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_counters.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_kqp.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_common.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_auth.cpp |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |56.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/ut/ydb-core-config-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/surg/main.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_length.cpp |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/common_yson_converters.cpp |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/remove_identity_map.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_registry.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.grpc.pb.cc |56.2%| [PR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/yql_yt_expr_nodes.{gen.h ... defs.inl.h} |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mapnext.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |56.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/lib/immediate_control_board_html_renderer.cpp |56.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |56.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_7406de026bf25e30e96a88517d.o |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/persistent_buffer_space_allocator.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/ddisk.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_stage_graph.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_84b6c628d15e9e575bec5be5c5.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_385ba1144ebdaae0c967a41e83.o |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest-timeout/py3/libpy3python-pytest-timeout-py3.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_7c9715e23edebba4ffb82d27d5.o |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/olefile/py3/libpy3python-olefile-py3.global.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.global.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kiwisolver/py3/libpy3python-kiwisolver-py3.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/extern/agg24-svn/libpy3-extern-agg24-svn.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/extern/ttconv/libpy3py3-extern-ttconv.a |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... 
grpc.pb.h} |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cycler/py3/libpy3python-cycler-py3.global.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/ut/main.cpp |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cppy/libpy3contrib-python-cppy.global.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kiwisolver/py3/libpy3python-kiwisolver-py3.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_303f7409bfab4277e367bbd11a.o |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_363b5875cc5c5e5745458b16b8.o |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_compute_statistics.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |56.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |56.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_utils.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/contourpy/libpy3contrib-python-contourpy.global.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/mvp_ut.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/utils/libcore-config-utils.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libwebp/sharpyuv/liblibs-libwebp-sharpyuv.a |56.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/libpy3python-matplotlib-py3.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pillow/py3/libpy3python-Pillow-py3.global.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |56.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/storage/core/protos/media.pb.{h, cc} |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_failure.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openjpeg/libcontrib-libs-openjpeg.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/physical/predicate_collector.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_a52eb3c900a84eaad86a211549.o |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/fonttools/libpy3contrib-python-fonttools.global.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |56.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_40226ff8497733c6e798ee3940.o |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/ut/protos/libcore-ut-protos.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_benchmark/main.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pillow/py3/libpy3python-Pillow-py3.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |56.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_265d7fd505d52534f38ea6fb7f.o |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/qhull/libcontrib-libs-qhull.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/ut/mvp_startup_options_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_security_printer_ut.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/main/libtesting-benchmark-main.global.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/linear_regression/liblibrary-cpp-linear_regression.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/libcpp-testing-benchmark.a |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_context.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_crypto_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/generated/codegen/main.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/accurate_accumulate/liblibrary-cpp-accurate_accumulate.a |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_scheduler_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache_ut.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/filesystem/librestricted-boost-filesystem.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/top_ut.cpp |56.7%| [CC] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_list.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/contourpy/libpy3contrib-python-contourpy.a |56.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |56.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |56.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |56.6%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/yql_generic_expr_nodes.{gen.h ... defs.inl.h} |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/session_service.{pb.h ... 
grpc.pb.h} |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/libpy3python-matplotlib-py3.global.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |56.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |56.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/fq.pb.{h, cc} |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/events.pb.{h, cc} |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/ut/consumer_offset_tracker_ut.cpp |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |56.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |56.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |56.8%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_whoami_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/config.cpp |56.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_ut.cpp |56.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |56.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |56.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |56.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table_ut.cpp |56.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jinja2cpp/libcontrib-libs-jinja2cpp.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/positional_yson_translation.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_query_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |56.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |56.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ut/message_id_deduplicator_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_login_ut.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/test_utils/libsecurity-ldap_auth_provider-test_utils.a |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_6af7a7ce8a1ee5e67d75a2978a.o |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_28f172e1aa977d907bdfa0a81b.o |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_04f2935f3ada8eb9d01ebaba6b.o |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_import_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/get_value.cpp |56.9%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libpng/libcontrib-libs-libpng.a |56.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |56.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/public/types_ut.cpp |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/objcopy_efd352795aee39d7ac6e163a2d.o |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lcms2/libcontrib-libs-lcms2.a |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_70be8d5dc43dbc1df67ecd59c9.o |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_abdf75b6b1f064446bfb0de382.o |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_71f52eacd4ede06f6cee6faac3.o |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libtiff/libcontrib-libs-libtiff.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_51b071d7746089933668451b33.o |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_21b681445fd6b9b7bc6182dcf6.o |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_33960438e2bb5426c44ac6d03a.o |56.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |56.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/5c5fdf614c3039a8dba94a4f38_raw.auxcpp |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libwebp/libcontrib-libs-libwebp.a |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |56.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_1e40500860b0f042e55f17298d.o |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/tools/memory_tests/bin/memory_tests |57.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libjpeg-turbo/libcontrib-libs-libjpeg-turbo.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/freetype/libcontrib-libs-freetype.a |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ut_common.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/audit_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ticket_parser_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |56.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |57.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |57.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |57.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |57.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/test_utils/test_settings.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |57.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |57.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |57.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... 
grpc.pb.h} |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |57.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |57.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |57.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |57.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/object_storage_listing_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/locks_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/flat_ut.cpp |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_cc203073bb2a03b31e52a78f24.o |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/cancel_tx_ut.cpp |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_71b7c7df3e7853e6e7cd11e484.o |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_1583476a2a074be936cf5a393e.o |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... 
grpc.pb.h} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_fdd48fc620c42f480ae38b77f5.o |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.pb.cc |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_fcc835b175560db56b04f51f44.o |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_dae5a42f53b4f98bf1b9fd8118.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_9f43001a877b9e371fe700c81d.o |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_7a185a4b35de7733fde931d298.o |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_25d3afea4b7778a202a80125cb.o |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_1339ee5ef04af3a5a49d43a6c9.o |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_cee1e02beaf827051149b5ca30.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_15e284a8ecb30c90903e842e70.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/38dcacd12926621ca72e30ce1b_raw.auxcpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... grpc.pb.h} |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/tools/memory_tests/main.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_12d01741952bd4afa836364d84.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/workload/libpy3stress-reconfig_state_storage_workload-workload.global.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/counters.cpp |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... 
grpc.pb.h} |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/storage_type_service.{pb.h ... grpc.pb.h} |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_988cc467d4da79de606ebf50ee.o |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_f4efacd00293c5fe09c3f84a62.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_1f78e7638ae0f2e308bd7331f9.o |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... 
grpc.pb.h} |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/writes_monitor.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/generated/codegen/main.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/header.cpp |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |57.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.{pb.h ... grpc.pb.h} |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |57.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.{pb.h ... grpc.pb.h} |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/filesystem/librestricted-boost-filesystem.a |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... 
grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |57.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/config_proto_plugin |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/cpp_styleguide/cpp_styleguide |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ut/segment_manager_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ddisk/ut/ddisk_actor_ut.cpp |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/grpc_cpp/grpc_cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... grpc.pb.h} |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2.py.p5ju.yapyc3 |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2_grpc.py.p5ju.yapyc3 |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_b1ab101896e634020e0c6ffeaf.o |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_82d6d29ac7be3798b7e748facc.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_589d529f9477963cf67237781c.o |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/common.h_serialized.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... 
grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/shard_writer.cpp |57.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/service/service.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/write_data.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp |57.7%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/742f1d972307bb9d01a727deea_raw.auxcpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2.py{ ... i} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |57.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py.p5ju.yapyc3 |57.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2_grpc.py.p5ju.yapyc3 |57.7%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py{ ... i} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |57.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/objcopy_9509442a50bd9d1393fa0d54e4.o |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/service/service.cpp |57.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... 
grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/prefixed_vector/ydb-core-kqp-ut-indexes-prefixed_vector |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config.{pb.h ... grpc.pb.h} |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_5accfe00d45fb7ebcc30e116b2.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_93665db601a12d4842de4565e2.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/durationpy/libpy3contrib-python-durationpy.global.a |57.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_b783a1a2aacb855daa1e55fad6.o |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... 
grpc.pb.h} |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/backup/import/task.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/usage/service.cpp |57.8%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... i} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |57.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... 
grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/executor.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction__intpy3___pb2.py.p5ju.yapyc3 |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms__intpy3___pb2.py.p5ju.yapyc3 |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/diagnostics/scan_diagnostics_actor.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/index.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... 
grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/filter.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/scan.h_serialized.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/prefixed_vector/kqp_indexes_prefixed_vector_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/builder.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/usage/abstract.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_1190eb11d3312a7d125a043ec9.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_e5f29f327882ea485363f55ce3.o |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_b9596990f3fd41de0fa350fc68.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_d54fb5ab35d376fe3311e9feea.o |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_04ccb9b757b207bc74705e9bb1.o |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |57.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/data.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |57.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/links.pb.{h, cc} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/blobsan/blobsan |57.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... 
grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/common.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/grpc_python/grpc_python |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/checker.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms__intpy3___pb2.py{ ... i} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config__intpy3___pb2.py.p5ju.yapyc3 |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/fmr.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |57.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2.py.p5ju.yapyc3 |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.{pb.h ... grpc.pb.h} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/actors/ut_service/slow/core-kqp-federated_query-actors-ut_service-slow |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/resource_preset.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/error.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/quota_service.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2.py{ ... i} |57.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... 
grpc.pb.h} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |57.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |57.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |57.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/ut/ut_slicer.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_6d8369510b03c08a300f2e2657.o |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/blobsan/main.cpp |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_da2669c2228a88c83cd32d45da.o |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.{pb.h ... grpc.pb.h} |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_ec94bbf9004678001f4c8195e3.o |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_ce0222bab1634be9f9a52f715d.o |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jinja2cpp/libcontrib-libs-jinja2cpp.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_e1e64d508ce59834ec0a40f731.o |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_7d0deb4120fbddf720c11b5358.o |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/objcopy_3fdb568d483b57acc8e627f8c2.o |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_sub_columns_portions.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_inserted_portions.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |58.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |57.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/events.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/actors/ut_service/slow/kqp_federated_query_actors_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/json/json_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/events.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config__intpy3___pb2.py{ ... 
i} |57.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/ut/ydb-core-util-ut |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |57.9%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |58.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/tier/object.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |58.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/duplicate_filtering.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |58.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |58.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/columnshard.cpp |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_5d73baff4bb68923ddbe5f4fcd.o |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |58.0%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.{h, cc} |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_e0aef87c4bf15cfdc957f4bdd1.o |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_printer.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_column_filter.cpp |57.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/workload/objcopy_456ef6a09159ceaa7dd7dffa59.o |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_hash.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/graph_description.pb.{h, cc} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/config.pb.{h, cc} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |57.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_reader.cpp |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/common.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |57.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_1dba5118ef0a485f3bf803be50.o |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_6e536fb2c379a4ebe79c499de8.o |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_28372cda055f052750260342ed.o |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_2d296dfaf373f7f15e6312517a.o |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_5954401d16336469a08f24b688.o |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_program_step.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/service.pb.{h, cc} |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_arrow.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_backup.pb.{h, cc} |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... 
grpc.pb.h} |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_f93c60b04a0499f2ec6880591a.o |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/execution.h_serialized.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/session.h_serialized.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.pb.{h, cc} |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_pool_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_string_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_common.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_large.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |57.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain1_map.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config__intpy3___pb2.py.siec.yapyc3 |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_decimal.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |57.7%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/tier/s3_uri.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/rpc/status.{pb.h ... grpc.pb.h} |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/ymq.{pb.h ... 
grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_10b0cfa01297f7d7392eb4d9e4.o |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_afb48e06933bdee6c5245db82e.o |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/options.{pb.h ... grpc.pb.h} |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_b306c2955ce13e6db6cae73363.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/workload/objcopy_1fa81513299be967ec5e9e5770.o |57.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/common.{pb.h ... grpc.pb.h} |57.7%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... defs.inl.h} |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/events.pb.{h, cc} |57.6%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/protoc-gen-mypy |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_div.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config__intpy3___pb2.py{, i} |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/workload/objcopy_bcc049f94d208d0695f84c335f.o |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_minmax.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/retry_options.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/yt.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.{pb.h ... 
grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_fixed_hash_set_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lz4_data_generator_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/wildcard_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_inplace_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/max_tracker_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lf_stack_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/numerical_maybe_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/log_priority_mute_checker_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_oneone_inplace_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/token_bucket_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_priority_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/simple_cache_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_stack_ut.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/event.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/type/timeofday.{pb.h ... grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_ut.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_import_v1.{pb.h ... 
grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_cow_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_lookup_unique_list_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/bits_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/event_priority_queue_ut.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/circular_queue_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.{pb.h ... grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls_ut.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_persqueue_v1.{pb.h ... grpc.pb.h} |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_bac05c8b5a79735451f58d9322.o |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_912038ceef7de48e0e15c25307.o |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_topic_v1.{pb.h ... grpc.pb.h} |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_52e86d5ee8fadefdbb415ca379.o |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_ut.cpp |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_2194854d9f8cbb3e0ba798b861.o |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_6403bfa5c5e35b29a21c73fb0e.o |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.{pb.h ... 
grpc.pb.h} |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_04f56802b68450abc8421282d0.o |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/interval_set_ut.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_id_dict_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_table_v1.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.pb.cc |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/console_tenant.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.grpc.pb.cc |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/cfg |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ctas_workload |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/kafka_workload |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/nemesis |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/node_broker_workload |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/oltp_workload |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |58.2%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/s3_backups_workload |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/simple_queue |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.grpc.pb.cc |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/statistics_workload |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/topic_kafka_workload |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk_internal.grpc.pb.cc |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/transfer_workload |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/topic_workload |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk_internal.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |58.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ydb_cli |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk.grpc.pb.cc |58.2%| 
[CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |58.3%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/forced_compaction.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/forced_compaction.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/simple_queue |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |58.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |58.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/base_with_blobs.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... 
grpc.pb.h} |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/generated/codegen/main.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.grpc.pb.cc |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... 
grpc.pb.h} |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tsserver/tsserver |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc 
|58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/colons.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk_internal.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... grpc.pb.h} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.pb.cc |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token_exchange_service.{pb.h ... 
grpc.pb.h} |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/main.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |58.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_compression/fast/test-results/py3test/{meta.json ... results_accumulator.log} |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/usage/service.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_type_ann.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... 
results_accumulator.log} |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |57.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |57.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |57.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |57.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/libpy3simple_queue.global.a |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/statistics/ydb-core-kqp-ut-olap-statistics |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/config.cpp |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |57.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/objcopy_6c8bedcdc8efb835a928b278ce.o |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |57.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... 
grpc.pb.h} |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/restore.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |57.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |57.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/common.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/add_index.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |57.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |57.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/executor.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |57.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_tablet.pb.{h, cc} |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/downtime_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/ut_helpers.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/indexes/indexes_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tenants_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cluster_info_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/test_server.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_auth.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... 
grpc.pb.h} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ut_ycsb.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc} |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_965640ca94893d27c182c611e2.o |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/ut_pg/scheme_tablecell_pg_ut.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/arrow/libsrc-client-arrow.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/tsserver/main.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/data_plane_helpers.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/shutdown/state.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/tier/identifier.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/fetch_config.pb.{h, cc} |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/statistics/statistics_ut.cpp |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... 
grpc.pb.h} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libpy3cpp-lwtrace-protos.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_281dee3582d4ce59ddf75426c6.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/topic_data_ut.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_8d4997dfd93843149bbf2f76f1.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/1bbc8f31f8eb46680aacf37392_raw.auxcpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_bridge.pb.{h, cc} |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/public/api/protos/libpy3public-api-protos.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/storage/core/protos/libpy3storage-core-protos.global.a |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.{h, cc} |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.global.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/709f125727d9ea4165df516509_raw.auxcpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_e32003454342267c2263935765.o |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/mon_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.{pb.h ... grpc.pb.h} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... 
grpc.pb.h} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.{pb.h ... grpc.pb.h} |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_48a08121f0a68da2f2666b0341.o |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_cms_v1.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_388aef0b6ac03d4f661ae7a30e.o |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/mock/libcommon-http_gateway-mock.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/statistics/kqp_olap_stats_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_tiered_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |57.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_88c3bdc430d7cb7f217b6a7f5c.o |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/manager.cpp |57.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/aba998449c2518e3272d8e87fb_raw.auxcpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/request_actor.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_f7d64736a37956e015fd868cb5.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_e568ceb8c9c8dcd7bf324cb350.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/config.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/interaction.cpp |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/abstract.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |57.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/alter_impl.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/alter.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/request_features.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_cms.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |57.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/object.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/fq_private_v1.{pb.h ... 
grpc.pb.h} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/restore_controller.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |57.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/common/timeout.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |57.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rowlocks.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/initialization.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_compression/create/test-results/py3test/{meta.json ... results_accumulator.log} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/partcheck/partcheck |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/common/service.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |57.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/parsing.cpp |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/ut/ydb-core-kqp-common-ut |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/decoder.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_f4b44a5d280d0f27f5ffd278e8.o |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_e2637cea0f2e4db109b364a246.o |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.{pb.h ... grpc.pb.h} |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_test_shard_v1.{pb.h ... grpc.pb.h} |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_08a4b5d38a76e21591db0c3424.o |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_aggrcount.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_timezone.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tostring.cpp |57.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |57.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_59a29ae0198b5839313f60be0c.o |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_77aad71b50f302ecb7ae9b1a11.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_388676493f4fc142dc0926df96.o |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/4342cd9f302f261f8b1a8137d8_raw.auxcpp |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_95b3eecc97c453f0c55c456659.o |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_coalesce.cpp |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/d78d0f74a3f72be1016c0cf8cf_raw.auxcpp |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_state.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.pb.cc |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |57.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_e774a92982177091add3614f41.o |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_7648c2519d02b8456f762efc4b.o |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_ae5b9f6e7a00f305f01a3dde87.o |57.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_1ba56637fdb9b3e1bc9b45aa93.o |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_0291c815ab38527766323f743e.o |57.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/init.h_serialized.cpp |57.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |57.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk_internal.pb.cc |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/partcheck/main.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_some.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_snapshot.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_sum.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/ut/kqp_tli_ut.cpp |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_debug.pb.{h, cc} |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_logstore.pb.{h, cc} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/query_actor/query_actor_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/common/row_dispatcher_settings.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/common/events.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_query_v1.{pb.h ... grpc.pb.h} |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... 
grpc.pb.h} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/abstract.h_serialized.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/backup/import/import_actor.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_partlayout_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_iter_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/push_limit_into_sort.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.{pb.h ... grpc.pb.h} |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_blobmap_ut.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/driver/nemesis |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/push_filter_under_map.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/shutdown/controller.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/graph_optimization.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.{pb.h ... grpc.pb.h} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/correlated_filter_pullup.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_object_storage_v1.{pb.h ... grpc.pb.h} |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_debug_v1.{pb.h ... grpc.pb.h} |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/apply_cbo.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/inline_simple_in_exists_subplan.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/row_dispatcher_settings.h_serialized.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |57.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... 
results_accumulator.log} |57.3%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.{h, cc} |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/bin/mvp_meta |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/operation_id.pb.{h, cc} |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_operation_v1.{pb.h ... grpc.pb.h} |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |57.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |57.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/graph.pb.{h, cc} |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/graph_execute.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_subscriber_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/usage/config.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/aggr_common.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/service/counters.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/visitor.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_filter_builder.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_2aa1916d45dca98014edb3d732.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/address.cpp |57.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/assign_const.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/aggr_common.h_serialized.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/parsing.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/constant_folding_stage.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/dq_solomon_shard.pb.{h, cc} |57.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/objcopy_81ae81681ce2388a653cfa5ba3.o |57.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.global.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/read.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/service/common.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/backup/async_jobs/import_downloader.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/show_create/show_create.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/custom_registry.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |57.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/bin/main.cpp |57.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/workload/objcopy_33abb7f7b873fc17cb192a5592.o |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/usage/events.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/viewer.pb.{h, cc} |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/common.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/workload/libpy3stress-statistics_workload-workload.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/objcopy_b4ebb94deb4cea673457b77fcc.o |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |57.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk_internal.{pb.h ... 
grpc.pb.h} |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/libpy3statistics_workload.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload/objcopy_16fe303ead399e4e3e388d7d17.o |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |57.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/forced_compaction.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/read.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/schema.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/remove.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/background_controller.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/read.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |57.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_long_tx_service.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp 
|57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |57.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |57.7%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/workload/ydb_cli |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/user_account.{pb.h ... grpc.pb.h} |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ut_helpers/test_topic.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/workload/objcopy_1a5e4cdba128ffec3e3b5af433.o |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ut_helpers/test_table.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/activation.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/storage_meta.pb.{h, cc} |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_discard.cpp |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/operation.{pb.h ... grpc.pb.h} |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |57.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_task_params.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_func.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_client.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/backup.{pb.h ... grpc.pb.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dictitems.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/persqueue.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/test_helper/kernels_wrapper.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/http_client.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/database_service.{pb.h ... 
grpc.pb.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/reservoir_sampling/libreservoir_sampling_udf.so |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/fq_v1.{pb.h ... grpc.pb.h} |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/cloud_service.{pb.h ... grpc.pb.h} |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libutils-fetch-proto.a |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config.pb.cc |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/result.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_removemember.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... 
grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_null.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reverse.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_pickle.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_size.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_time_order_recover.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tooptional.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |58.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_scalar_apply.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_top.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_varitem.cpp |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/35cd3c4b96eacfc5979f8a7b6c_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_way.cpp |58.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |58.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_udf.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hopping.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_filter.cpp |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_exists.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/7a8f263baf9436f1a5b95d265f_raw.auxcpp |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/21873bd073e995086c14763fe9_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_combine.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/2ba79c66f3552fb8850c995300_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/19a4d8bcee3a72879caf24402b_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/5b972cbaab6c841731b9975b50_raw.auxcpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/3ffe85ee5fba79df8bcff3e680_raw.auxcpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/6eca5ec54e610c406326bdb0ba_raw.auxcpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/895dd3b3963123daf30549acdb_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/c29a130feee809f0d792219d59_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b04a8a4d880a6ab0d69f34e52c_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/a20338fb38a7d35dd0838d3664_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b161971200ff1b3ec5b96b9241_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/d52c67111e677f5b2c38c4aa17_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/f7fb2ed43c8925f3e97726cb0a_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/e14864114ae015ed6c4b77cba6_raw.auxcpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_callable.cpp |58.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chopper.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/records.pb.{h, cc} |58.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/flat_executor_vacuum_logic.h_serialized.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/annotations.{pb.h ... grpc.pb.h} |58.1%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/35cd3c4b96eacfc5979f8a7b6c_raw.auxcpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |58.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ddisk/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/access_service.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/resource.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token.{pb.h ... 
grpc.pb.h} |58.2%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/9b8f667efdecb908b3197117f5_raw.auxcpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/serialized_invoker.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/compressors/ut/ydb-library-yql-providers-s3-compressors-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/reference.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_sqs_topic_v1.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/access_service.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/annotations.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |58.2%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/7a8f263baf9436f1a5b95d265f_raw.auxcpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/dstool/ydb-dstool |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/annotations.{pb.h ... grpc.pb.h} |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/1962cb9fade79865d33960beb4_raw.auxcpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |58.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction__intpy3___pb2.py{ ... i} |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.{pb.h ... grpc.pb.h} |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/libpy3oltp_workload.global.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/objcopy_bcf2142e31bf537964dc063d11.o |58.3%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |58.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/objcopy_fca89909cedb628068681e1038.o |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2.py{ ... i} |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/compressors/ut/decompressor_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_enumerate.cpp |58.3%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |58.3%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... i} |58.3%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.3%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2.py{ ... 
i} |58.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |58.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/compressors/ut/output_queue_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |58.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq__intpy3___pb2.py{ ... i} |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv__intpy3___pb2.py{ ... i} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2.py{ ... i} |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters__intpy3___pb2.py{ ... i} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health__intpy3___pb2.py{ ... 
i} |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromstring.cpp |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/issue_id.pb.{h, cc} |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/assign_internal.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/quota_internal.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.pb.cc |58.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/56f374d81ad14f072a4df966aa_raw.auxcpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_pg/flat_database_pg_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_message.pb.{h, cc} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/credentials.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/task_command_executor.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join_dict.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |58.5%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/yql_pq_expr_nodes.{gen.h ... defs.inl.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/duration_moving_average.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.pb.cc |58.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.{pb.h ... 
grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk_io/vdisk_io.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/api.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_create.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |58.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/objcopy_3c254544f1506576cfb57420e0.o |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2.py.p5ju.yapyc3 |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/objcopy_51ea47529f6e53ef7e2a1e3f59.o |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/objcopy_bc85f50e30e47af83461b8c21a.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/program/execution.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/942364ad5a5e5e098d1e17591f_raw.auxcpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/add_data.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/filters_set.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |58.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py{ ... i} |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |58.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/attributes/ut/ydb-core-ymq-attributes-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/dynamic.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/metrics_queue.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources__intpy3___pb2.py.p5ju.yapyc3 |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/functions.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/flat_table_part.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |58.5%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/olap_workload |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/actors/ut/yql_arrow_push_down_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config__intpy3___pb2.py{ ... i} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export__intpy3___pb2.py{ ... i} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources__intpy3___pb2.py{ ... 
i} |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_b9aaa278b10ed44e5645b3ef2f.o |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/protos/out/out.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |58.5%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/yql_pg_expr_nodes.{gen.h ... defs.inl.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.{pb.h ... grpc.pb.h} |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut/inside_ydb_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/counters_shard.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |58.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/storage/ydb-core-kqp-ut-olap-storage |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/iceberg_ddl_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_tablet.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_builder_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source_ut.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/stream_logic.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/attributes/ut/attributes_md5_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/query_data/kqp_query_data.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... 
grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/collection.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_matrix_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/constrained_schema.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_nbs_v1.{pb.h ... grpc.pb.h} |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |58.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... 
grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/common.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/storage/compression_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_ec9bc627b6d56d1a941c2b7e4f.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.pb.cc |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_dfbd751fc64901b06ded4354c8.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_d709b1895f91108d9f51b703ea.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_b83d9052e0bc89877bbe223294.o |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_716263ce181e67161f84180281.o |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_6b62c1db41e3ebd0278a84dced.o |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/common.h_serialized.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... 
grpc.pb.h} |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_eca1874c40730c61b45fdec1c1.o |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/tpch/tpch |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_56c6157408594466f7051b7469.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_94f66830f5c535f3f015b42e43.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_3382de65b417782bf648c475b1.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/events.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/c4711c742b4f72331dccea9c2a_raw.auxcpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/combiner_perf/bin/main.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/local_cluster/local_cluster |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_ddisk/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor/service/worker.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/general_cache/usage/events.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/counters.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/prefixed_vector/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.cc |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/events.pb.{h, cc} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |58.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/task.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_951c70889c9404d1662da27090.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_e317764e105a7e9e48b67a7b7e.o |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_bridge_common.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp 
|58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/show_create/create_view_formatter.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_bcbbd2d8f2367d5f3ed5199234.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_scale_manager_graph_cmp_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/main.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_delayedresp_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/commands.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_query.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare_scheme.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/datastreams_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/local_cluster/objcopy_e83326c043824a10ed26240d03.o |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_drop.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/local_cluster/libpy3local_cluster.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/program/resolver.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/counters.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |58.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/common.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/s3_bench/ydb_s3_bench |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/put_status.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/abstract.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_d80f811b3fe32bcd2128d6ab6f.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... grpc.pb.h} |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_c0b503af0486d120ebabb4c64b.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_4b2ec656f7e85bc05586d7e6fc.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/io_formats/arrow/scheme/csv_arrow_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_io_state.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tests/ydb-tests-stability-tests |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/header.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/predicate.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/error_collector.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_11720a94357f4e62a79e34a420.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.pb.cc |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/usage/config.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/storage/compaction_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |58.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/aae788a890ddcb1702c659c8aa_raw.auxcpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/s3_bench/main.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/inflight_limiter_ut.cpp |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/transitional/folder_service.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/folder_service.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/compaction_info.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/folder.{pb.h ... grpc.pb.h} |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... 
grpc.pb.h} |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_bd8a6d25e26a719f80141d0711.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_19422d2b60428207055b4ed843.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_4b767dce2ddf7a5424aef828d6.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_0664e2ab2eb37ae9f02538e483.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.{pb.h ... grpc.pb.h} |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |58.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/stability/healthcheck/libpy3library-stability-healthcheck.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_b031a661ba244dffa03ab0c7ec.o |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/5a2f230528097042fdaf726fed_raw.auxcpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/shutdown/events.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tests/objcopy_ebd06ca3d7aa9b5b1b6ec851b7.o |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/original.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tests/objcopy_dfafe5e4a6d4b0c6fbf440843b.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_container.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tests/objcopy_c5bab83c436501626a45161d22.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tests/objcopy_8eee7558f17cd3744d1b7a5e68.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tests/objcopy_8c41a9066682151bfb4a1a2f9a.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stability/libpy3tests-library-stability.global.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/cfg/bin/ydb_configure |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stability/utils/libpy3library-stability-utils.global.a |58.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_borrowed_compaction/ydb-core-tx-datashard-ut_borrowed_compaction |58.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.{pb.h ... 
grpc.pb.h} |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |59.0%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.h |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_compression/alter/ydb-tests-olap-column_compression-alter |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/events.pb.{h, cc} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |58.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [PR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/yql_ytflow_expr_nodes.{gen.h ... defs.inl.h} |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_task_params.pb.{h, cc} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |58.5%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_af18efc2f04dd1af5ca802c329.o |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/stream.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stability/healthcheck/objcopy_fbeafc7f011525fda6ed23e6e3.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stability/healthcheck/objcopy_98478d217505ced1e38b6bd497.o |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... 
grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |58.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/stability/healthcheck/objcopy_75ddb319f6ea5a5e05fb01b29c.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_borrowed_compaction.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/objcopy_940b9a794cb8fbc6ebdf926276.o |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_auth.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |58.6%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/expr_nodes/dqs_expr_nodes.{gen.h ... defs.inl.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_df0cb3f315162a3110ee243ecd.o |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_d23500649301df2a8de48ba70d.o |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk2/compaction.cpp |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_c65a9d5efe13dc05c1466090ba.o |58.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/alter/objcopy_ad97bd114c44dc63b832ebb6b5.o |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/alter/objcopy_c703cf3a8939bee99894b1df7e.o |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_b8aa61f402be805d2e3e9e75a2.o |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/alter/objcopy_308515e78e2ea08f659b39e09d.o |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_bridge_v1.{pb.h ... grpc.pb.h} |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/object.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/one_layer.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/config.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/initializer.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/fulltext/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/generic_manager.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_pea_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/restore.cpp |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portion_interval_tree.cpp |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_cf3971576aced18377e99f5367.o |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_compression/fast/ydb-tests-olap-column_compression-fast |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/13360e4ecdf34efe6c3a817a44_raw.auxcpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.{pb.h ... 
grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/registration.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/fq_private.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/modification_controller.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ymq.pb.{h, cc} |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/common/config.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/preparation_controller.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/fetch_database.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_maintenance_v1.{pb.h ... grpc.pb.h} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/ydb_value_operator.cpp |58.5%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/stability/healthcheck/ydb_cli |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_count.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_thresholds.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/auth_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_builder.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_test_shard.pb.{h, cc} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tier_info.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/ut/graph_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_rate_limiter_v1.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_replication.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_tablet_v1.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_apply.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_e2cd022168ff179d1441f5d3df.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_ff581f3cff717ab223922f0cd8.o |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_d191482d8b66f1c03ea8df56d3.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_8685c3ae88e5169a5acffc7bc4.o |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/compile_service.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_invoke.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scripting.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_compress.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/fast/objcopy_c363c30511bd0a08b96221b101.o |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |58.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/fast/objcopy_b1bfc0c63dbba17c5663464590.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/lib/dynamic_control_board_impl.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.grpc.pb.cc |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_compression/fast/objcopy_86fed7f0fa9a4ac69e635a8f87.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/inline_scalar_subplan.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/prune_columns_stage.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_source_builder.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_type_info_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/ut/rbo/objcopy_8a4839ce59e28719331c9dfe7e.o |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/assign_stages.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/build_initial_cbo_tree.cpp |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_query_builder.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_join_builder.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/inline_cbo_tree.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_aggregation_builder.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/streaming_service.{pb.h ... grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_0ade7a5662c6292edc3a8de02f.o |58.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_c9ab749ab3188a8582c5cefa5e.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/reserve.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_sort_builder.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.{pb.h ... 
grpc.pb.h} |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |58.8%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/yql_s3_expr_nodes.{gen.h ... defs.inl.h} |58.8%| [EN] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/kv.h_serialized.{cpp, h} |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/backup/import/session.cpp |58.8%| [EN] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/stock.h_serialized.{cpp, h} |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/utils.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/settings.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_17cef60c2dd0eb7ea46181ba87.o |58.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/ut/topic_names_converter_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/txusage_fixture.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/audit/login_op.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/blob_set.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload/objcopy_d0bb31c1ca2f7f4c842859052f.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/data.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/req_tracer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/column_tables.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.cc |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/event_loop.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_io_state.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/workload/objcopy_12968ff6e39e65b31509eac60c.o |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_compression/alter/test-results/py3test/{meta.json ... results_accumulator.log} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/profile_service.{pb.h ... 
grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_afdf6d60c4f76ae91a235d460b.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c85d6dfb7100ccc30488c4ec9a_raw.auxcpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |59.0%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/98418d1c09486060ee803a5156_raw.auxcpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/48e3e82568a27690f850e09bd0_raw.auxcpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/utf8_range/librestricted-google-utf8_range.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg.cpp |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_dynamic_config_v1.{pb.h ... grpc.pb.h} |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/value/ut/ydb-public-lib-value-ut |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/storage_type.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ensure.cpp |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_source.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/database.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_toindexdict.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/update.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_range.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... 
grpc.pb.h} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_append.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/32db9debcb1bd2c2ef5c8589d0_raw.auxcpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/tx_event.pb.{h, cc} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_window_frames_collector_params_deserializer.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/eebe9b71357a49816a6e37bb1d_raw.auxcpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/22ce45695e33ad45347f59ec11_raw.auxcpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/33c424d1f168d258adae822128_raw.auxcpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/85072bb936b0763f4b03040c4c_raw.auxcpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ad59adc438753031259d49b25e_raw.auxcpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ad73b80f41665e6bf678725de8_raw.auxcpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/backup_service.{pb.h ... grpc.pb.h} |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/logger_config.pb.{h, cc} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/events.pb.{h, cc} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... 
grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |59.2%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c85d6dfb7100ccc30488c4ec9a_raw.auxcpp |59.2%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/48e3e82568a27690f850e09bd0_raw.auxcpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |59.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/version/version_definition.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_blocks.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/sensitive.{pb.h ... grpc.pb.h} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/user_account.{pb.h ... grpc.pb.h} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/access.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/oauth_request.{pb.h ... 
grpc.pb.h} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |59.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/result_set_format/ydb-tests-compatibility-result_set_format |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_8dbc00b2eb10d4ff1ed6e47c3a.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_priority_provider.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/vector/ydb-core-kqp-ut-indexes-vector |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_plan_conversion_utils.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/value/ut/value_ut.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/push_olap_filter.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_statistics.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |59.0%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |59.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... 
i} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |59.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |59.1%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/projection.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/0dcc46b1d394aa60fd3d37d468_raw.auxcpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |59.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/objcopy_b500d4c016dd56011b777a1283.o |59.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/objcopy_8ebbbeea46de68e6f72977a547.o |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.grpc.pb.cc |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/connector.pb.{h, cc} |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/vector/kqp_indexes_vector_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/yq_internal.pb.{h, cc} |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2.py{ ... i} |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/sink.pb.{h, cc} |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/show_create/formatters_common.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/backup/import/control.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |59.0%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/0dcc46b1d394aa60fd3d37d468_raw.auxcpp |59.0%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |59.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/common.py.yapyc3 |59.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/test_streaming.py.yapyc3 |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/backup/iscan/iscan.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/service/manager.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config__intpy3___pb2.py.p5ju.yapyc3 |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.pb.cc |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/SQLv1Antlr4Parser.pb.{code0.cc ... 
main.h} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_0c11f1377612879bf0a028062e.o |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_9001a43ebb2f39da4516c33deb.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tenants_ut.cpp |59.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_b91160bcee04ad1f57e80af064.o |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine__intpy3___pb2.py.p5ju.yapyc3 |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_ecce6cdc5283ee9e09809febc9.o |59.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/common.py.yapyc3 |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |59.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/test_streaming.py.yapyc3 |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_e89cf02a9ed3d3ce4d135f1b6a.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/result_set_format/objcopy_3aec050ca8485886fd4b8eafb6.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/result_set_format/objcopy_8a4d9ee45734faa2e2e415766d.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |59.2%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/compatibility/result_set_format/objcopy_8b4bbe6a7e2b5ae1e2417f51a2.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_dc090340e6aea1b64bea92b3f8.o |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/edaf602b2011baa1519a223d63_raw.auxcpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/mdb_endpoint_generator_ut.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_s3_writer/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/daf02fd86bb7e2296f1437ae1f_raw.auxcpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/ut/ut_helper.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_truncate_table_reboots/ydb-core-tx-schemeshard-ut_truncate_table_reboots |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |59.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |59.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1326afc143d720f2af434cd836.o |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |59.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/conftest.py.yapyc3 |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_output_channel_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_e7dfca3c87220ea0ed36a65f9f.o |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |59.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/ut_common.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.{pb.h ... grpc.pb.h} |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon__intpy3___pb2.py.p5ju.yapyc3 |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1007df29dec27b0b7a1587d49f.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/reading/ydb-core-kqp-ut-olap-reading |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/service/counters.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/fulltext/ydb-core-kqp-ut-indexes-fulltext |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_3db6af291678d4ac330517956a.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon__intpy3___pb2.py{ ... i} |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_expression.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard_config__intpy3___pb2.py{ ... i} |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/test_watermarks.py.yapyc3 |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_5f161468ff5322b803d4d0dc79.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_arrow_helpers_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_422ca1effff14e5a08952658d0.o |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine__intpy3___pb2.py{ ... i} |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_22edc4e58ff43cb5e83c9bbe2c.o |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/service.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/write.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/registry.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache__intpy3___pb2.py{ ... i} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/stats.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_truncate_table_reboots/ut_truncate_table_reboots.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/abstract.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |59.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/common/ss_dialog.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.{pb.h ... grpc.pb.h} |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/common.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/peephole_predicate.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/deleting.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/reading/sparsed_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/reading/simple_reader_ut.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain_map.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common_data.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_8ac5034640eee44b1cd5fa5253.o |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/status_channel.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/fulltext/kqp_indexes_fulltext_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/simple_blocking_write_session_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/container.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/blobs.pb.{h, cc} |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_sequenceshard.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/access.{pb.h ... 
grpc.pb.h} |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/initiator.pb.{h, cc} |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/events.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_monitoring_v1.{pb.h ... grpc.pb.h} |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49e9948af399bc60603a7d2db5.o |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.{pb.h ... grpc.pb.h} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.{pb.h ... grpc.pb.h} |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/4399546af28cb40e5d74ea4a4b_raw.auxcpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7bfd03a31f5e230607792f10cc.o |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |59.3%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/objcopy_8ebbbeea46de68e6f72977a547.o |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.{pb.h ... grpc.pb.h} |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/table_record.cpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_1a1e300767b552f4c13c3295d0.o |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/expand_cbo_tree.cpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/protos/task.pb.{h, cc} |59.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/kqp_common.h_serialized.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/request/common.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.{pb.h ... 
grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_view.pb.{h, cc} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_rate_limiter.pb.{h, cc} |59.3%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/conftest.py.yapyc3 |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_map_builder.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/extract_join_expressions.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |59.3%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_convert_to_physical.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |59.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/workload/objcopy_3cad7ffe5c68e883c93b3c3b3a.o |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/objcopy_caf222d14387d4810b5cb3e853.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8db6616d40f8020d0632222fe3.o |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_9314464e3560b2511ac931acd9.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8fca143a218b930f297b779e3a.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_f458fc1e40a33beb49b77512d1.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_b2bcc4e0bf3c59d73d91f4f423.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_698c9f8753a1b6a5d23e436b7f.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_12c0c2de60710b6e317b585312.o |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_config_v1.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/knn/ydb-tests-datashard-knn |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_auth_v1.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/push_map.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} |59.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/tools/testing/loadtest/lib/protos/nbs2_load.pb.{h, cc} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_ymq_v1.{pb.h ... grpc.pb.h} |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_replication_v1.{pb.h ... grpc.pb.h} |59.2%| [CC] {tool} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |59.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/workload/ydb_cli |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_563ec4ec2a6e4654b832ebf92f.o |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_clickhouse_internal_v1.{pb.h ... grpc.pb.h} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_8dd70891bfbac8135389af5f53.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_4c8bcc1a1b73f17a2a0c98e126.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_eaf4986a3428dc8ee55f55b486.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/service_node/main.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_11e4572b38d275456acaf6e9ab.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_81551943d4b76a1ac162e2f749.o |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/index.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |59.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/objcopy_b500d4c016dd56011b777a1283.o |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/chaos_lease_base.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_to_list.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multimap.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/a335c94fa1bad63896ccff7672_raw.auxcpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut 
|59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_map.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ce697fc3b324cb6152c4d7223d_raw.auxcpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/e1db0ed832c3e089e10350327b_raw.auxcpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/bounded_concurrency_invoker.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_coalesce.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/audit/audit_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |59.3%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/0dcc46b1d394aa60fd3d37d468_raw.auxcpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flow.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_export_v1.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_logstore_v1.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/service.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... 
grpc.pb.h} |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2.py.p5ju.yapyc3 |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |59.4%| [CC] {tool} $(B)/ydb/core/protos/feature_flags.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |59.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/general_cache/usage/abstract.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/proxy.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/storage/core/protos/request_source.pb.{h, cc} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_listfromrange.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/tenant_user_account.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_federation_discovery_v1.{pb.h ... grpc.pb.h} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/knn/objcopy_e73aed5f96567ff87b3489e87b.o |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/claims.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... 
grpc.pb.h} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/knn/objcopy_6cfe787a44e849877f4b79a557.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/knn/objcopy_5f1f131b295e1fe534a12648c6.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_ddisk.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/action.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/operation.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_ce63bab0f89a8715a42271a26a.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_48e09f84949dd34b82c51f21a3.o |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |59.4%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/workload/ydb_cli |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_3b212908932716bae8a8e38b2c.o |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics__intpy3___pb2.py{ ... i} |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |59.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/lib/auth/auth_helpers.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/fetcher.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/modification.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation__intpy3___pb2.py{ ... i} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_ut_pool.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_ut_local.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.pb.cc |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/push_filter_into_join.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/service/counters.cpp |59.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |59.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_counters.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.global.a |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_convertion_utils.cpp |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/rules/remove_extra_renames_stage.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/main.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |59.3%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/workload/objcopy_ad01b515e6ee8206b6b9fafbc1.o |59.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/histogram_borders.cpp |59.4%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/workload/ydb_cli |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache_ut.cpp |59.4%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... 
defs.inl.h} |59.3%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/4d5e90a82c3d6e8c08a37d8154_raw.auxcpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/cluster_directory.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/run_ut.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |59.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/resource_preset_service.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/objcopy_c96ef635306ccee8a5cf6359f1.o |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/libetcd-grpc.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_spacetracker_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_keyvalue.pb.{h, cc} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |59.4%| [CC] {tool} $(S)/ydb/core/base/generated/codegen/main.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/events.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/common.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/composite.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_nbs.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_shared.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_base_init.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload/objcopy_2bb582236b38a353ed1448f645.o |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold1.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/517d83cd504b5237c7e0810fc2_raw.auxcpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |59.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/0dcc46b1d394aa60fd3d37d468_raw.auxcpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/watchdog_invoker.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/pgproxy.pb.{h, cc} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |59.5%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_while.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/provided.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_invoker.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_40779f0570229cef213050a4fa.o |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |59.5%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/workload/ydb_cli |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map_join.cpp |59.5%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/types/ydb-core-kqp-ut-olap-types |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/throttler_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.{pb.h ... 
grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampler_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tstool/tstool |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/sessions/sessions.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/validation.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_a6e393b6d53f4c73feac80b55c.o |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_89b3e69f7cdba68b4eefcae48c.o |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.pb.cc |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_4826ee2207124da1bc398e3bd8.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/unit/client/oauth2_token_exchange/helpers/libclient-oauth2_token_exchange-helpers.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/ydb-dump.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/kv.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/service_account.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-transfer-topic-to-table.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/auth.{pb.h ... 
grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_coordination_v1.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/types/datatime64_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/types/bool_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/types/json_ut.cpp |59.5%| [PR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/include/llvm/IR/Attributes.inc{, .d} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/types/decimal_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/rpc.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs_fixture.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/user_account_service.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/run_ydb.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_fulltext_index.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-topic.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_ut_configs.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_4b2e093abff756c97b675c0a31.o |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/program.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2.py.p5ju.yapyc3 |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_b5b36403e069f48d06f8367722.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_c693478edc1220e7a9143567d1.o |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_replay.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_342e8590e41686b18307d054a9.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_compiler.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/splitter.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/ytree/proto/ypath.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/misc/proto/guid.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/queue.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/tracing/proto/tracing_ext.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_proccessor.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/main.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_maintenance.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} 
$(B)/yt/yt_proto/yt/core/misc/proto/error.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/ytree/proto/attributes.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/config.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/workload/objcopy_8b1494cbc491521341d984598d.o |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/68ac7951c4515300670932caa8_raw.auxcpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/misc/proto/hyperloglog.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_state.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_extend.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/local_leader_election_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/rpc/proto/rpc.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/node_tracker_client/proto/node_directory.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/yson/proto/protobuf_interop.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_verify_failure_regression.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/chunk_client/proto/data_statistics.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/node_tracker_client/proto/node.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/hive/proto/cluster_directory.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_heap.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/misc/proto/protobuf_helpers.pb.{h, cc} |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/tools/testing/loadtest/lib/protos/libloadtest-lib-protos.a |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... 
grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2.py{ ... i} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |59.5%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |59.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/types/bool_test_enums.h_serialized.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/objcopy_6077c98b9810fee0e2250a36a4.o |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/graph_params.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/double_indexed_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/service/service.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |59.6%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py.p5ju.yapyc3 |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/read_finished.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/data.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/common/util_ut.cpp |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_c02c3d9f840d02af9fad858a55.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |59.6%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/98418d1c09486060ee803a5156_raw.auxcpp |59.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... i} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/events/data_plane.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufresize_ut.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/mvp.pb.{h, cc} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/type/dayofweek.{pb.h ... grpc.pb.h} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/result_set_meta.pb.{h, cc} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebuf_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufstream_ut.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/persqueue.{pb.h ... 
grpc.pb.h} |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_backup_v1.{pb.h ... grpc.pb.h} |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_view_v1.{pb.h ... grpc.pb.h} |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |59.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/protoc |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/dc048c91e67372877fc6ad2dfc_raw.auxcpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/iam_token_service.{pb.h ... grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_lsnmngr_ut.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/device_perf_test.{pb.h ... grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/memusage_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |59.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_syncneighbors_ut.cpp |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_00c87b13e2f685811a9825079d.o |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_4e45fac9e6e2cbc502659b10eb.o |59.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_44fac4fe441507735704a000ad.o |59.7%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/forced_compaction.{pb.h ... grpc.pb.h} |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullstorageratio_ut.cpp |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_c19b3eb5266bf8e49b2b628bc2.o |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... 
grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_history_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich_constrained.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_nop.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/constructor.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/nbs/cloud/blockstore/public/api/protos/io.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/chunk_client/proto/read_limit.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |59.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming/objcopy_8ebbbeea46de68e6f72977a547.o |59.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming/0dcc46b1d394aa60fd3d37d468_raw.auxcpp |59.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming/conftest.py.yapyc3 |59.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming/common.py.yapyc3 |59.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming/0dcc46b1d394aa60fd3d37d468_raw.auxcpp |59.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming/objcopy_b500d4c016dd56011b777a1283.o |59.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming/test_streaming.py.yapyc3 |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/ytree/proto/request_complexity_limits.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/source.pb.{h, cc} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config__intpy3___pb2.py.p5ju.yapyc3 |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/sessions.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_borders_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_types_proto_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_ranges_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2.py.p5ju.yapyc3 |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/objcopy_4a2af4000c37712b7785122f9f.o |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2.py.p5ju.yapyc3 |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/853739aab8b8326c327792b514_raw.auxcpp 
|59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/objcopy_0033302419990e7c37da7fa23e.o |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/objcopy_ab13df51a06a892447a4ac624e.o |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_tablecell_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2.py{ ... i} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2.py{ ... i} |60.0%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.{h, cc} |60.1%| [PD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |60.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py{ ... i} |60.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.{h, cc} |60.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... i} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... i} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... i} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |60.2%| [PD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |60.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py.p5ju.yapyc3 |60.2%| [PB] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py{ ... i} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |60.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... i} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... grpc.pb.h} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... 
grpc.pb.h} |60.3%| [PD] {RESULT} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |60.3%| [PD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |60.3%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.{h, cc} |60.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.{h, cc} |60.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py{ ... i} |60.4%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |60.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py.p5ju.yapyc3 |60.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |60.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/56f374d81ad14f072a4df966aa_raw.auxcpp |60.5%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.5%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.5%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.5%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py.p5ju.yapyc3 |60.5%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.5%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |60.5%| [PD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |60.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... i} |60.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |60.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |60.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... i} |60.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |60.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... i} |60.6%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py{ ... i} |60.6%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |60.6%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |60.6%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... i} |60.6%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |60.7%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... 
grpc.pb.h} |60.7%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... grpc.pb.h} |60.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.7%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |60.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py.p5ju.yapyc3 |60.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.7%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/56f374d81ad14f072a4df966aa_raw.auxcpp |60.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |60.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |61.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |61.2%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |61.2%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |62.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |63.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/dq/state/dq_state_load_plan.cpp |63.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |63.6%| [AR] {RESULT} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |63.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/dq/state/dq_state_load_plan.cpp |63.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |64.3%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |64.3%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |64.5%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |64.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |64.9%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |64.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |65.1%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |65.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |65.4%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |65.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |66.0%| [CC] {tool} $(B)/ydb/core/protos/grpc.pb.cc |67.3%| [CC] {tool} $(B)/ydb/core/protos/console.pb.cc |67.3%| [CC] {tool} $(B)/ydb/core/protos/console.grpc.pb.cc |68.5%| [CC] {tool} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/ctas ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/ctas ------- [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/stress/show_create/table/show_create_table ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/show_create_table ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/node_broker ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/node_broker ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/show_create_view ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/view/show_create_view |69.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/generated/codegen/main.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/streaming ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/streaming ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/backup_stress ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/backup_stress |69.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_settings.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/transfer ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/transfer |69.2%| [CC] {tool} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |69.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/generated/codegen/main.cpp |69.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_settings.cpp |69.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/common/utils.cpp |69.6%| [CC] {tool} $(B)/ydb/core/protos/cluster_state_info.pb.cc ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/cdc ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |69.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |69.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/cdc |69.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |69.9%| [AR] {tool} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |70.0%| 
[AR] {tool} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |70.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/common/utils.cpp |70.0%| [CC] {tool} $(B)/ydb/core/protos/grpc.grpc.pb.cc |70.1%| [CC] {tool} $(B)/ydb/core/protos/console_config.grpc.pb.cc |70.2%| [CC] {tool} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |70.4%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |70.4%| [CC] {tool} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |70.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator.cpp |70.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/simple_queue ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |71.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/simple_queue |71.4%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |72.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/dq/state/ut/dq_state_load_plan_ut.cpp |72.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/registry.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/dq/state/ut/dq_state_load_plan_ut.cpp |72.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/registry.cpp |72.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_provider_impl.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_provider_impl.cpp |72.1%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_provider.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_provider.cpp |72.2%| [LD] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |72.2%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasource_type_ann.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/util.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_type_ann.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_io_discovery.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/util.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasource_type_ann.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_type_ann.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_io_discovery.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/core_validators.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/core_validators.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor_base.cpp |72.2%| [CC] {tool} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor_base.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_writer.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_writer.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_load_meta.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_load_meta.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_io_discovery.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_io_discovery.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/error.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/error.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_helpers.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_mkql_compiler.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_helpers.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_mkql_compiler.cpp |72.2%| [CC] {tool} $(B)/ydb/core/protos/console_config.pb.cc |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/auth_config_validator.cpp |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_pdiskfit/lib/basic_test.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/auth_config_validator.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/lib/basic_test.cpp |72.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |72.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_requestimpl.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_requestimpl.cpp |72.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |72.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/monitoring_config_validator.cpp |72.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/monitoring_config_validator.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_execution.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink_execution.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |72.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |72.3%| [AR] {RESULT} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/format_handler.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink.cpp |72.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |72.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasink.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/format_handler.cpp |72.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |72.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_ytflow_integration.cpp |72.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_ytflow_integration.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_config.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_config.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/statuses.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/statuses.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/util/config_index.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |72.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/util/libcms-console-util.a |72.3%| [AR] {RESULT} $(B)/ydb/core/cms/console/util/libcms-console-util.a |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/util/config_index.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/init/init_noop.cpp |72.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init_noop.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_sectorrestorator.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_sectorrestorator.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_ytflow_optimize.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_ytflow_optimize.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_write_actor.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_write_actor.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasource.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_datasource.cpp |72.3%| [CC] {tool} $(B)/ydb/core/protos/config.grpc.pb.cc |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/slide_limiter/usage/config.cpp |72.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/usage/config.cpp |72.3%| [CC] {tool} $(B)/ydb/core/protos/msgbus.pb.cc |72.3%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/fq/libs/checkpoint_storage/storage_settings.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_settings.cpp |72.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_physical_optimize.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_physical_optimize.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_logical_opt.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_logical_opt.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/version/version.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/version/version.cpp |72.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/version/libversion.a |72.4%| [AR] {RESULT} $(B)/ydb/core/driver_lib/version/libversion.a |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/slide_limiter/usage/service.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/usage/service.cpp |72.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |72.4%| [AR] {RESULT} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_dq_integration.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/validators.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/validators.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/provider/yql_pq_dq_integration.cpp |72.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config.pb.cc |72.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/libcore-config-validation.a |72.4%| [AR] {RESULT} $(B)/ydb/core/config/validation/libcore-config-validation.a |72.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |72.4%| [AR] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |72.4%| [CC] 
{BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.pb.cc |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/slide_limiter/service/service.cpp |72.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |72.4%| [AR] {RESULT} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |72.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |72.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/service/service.cpp |72.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |72.4%| [AR] {RESULT} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |72.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |72.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |72.5%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |72.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |72.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |72.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/console_dumper.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/console_dumper.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |72.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |72.5%| [AR] {RESULT} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/config.cpp |72.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config.cpp |72.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/events.cpp |72.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/events.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |72.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |72.6%| [AR] {RESULT} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/init/dummy.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/dummy.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |72.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |72.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |72.6%| [AR] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/helpers.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/helpers.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |72.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |72.6%| [AR] {RESULT} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |72.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_rd_read_actor.cpp |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |72.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/certificate_check/cert_check.cpp |72.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/http.cpp |72.6%| [AR] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |72.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |72.6%| [AR] {RESULT} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |72.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/http.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_check.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_rd_read_actor.cpp |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |72.7%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/public_http/http_req.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/base/msgbus.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/public_http/http_req.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |72.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/base/libpublic-lib-base.a |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |72.7%| [AR] {RESULT} $(B)/ydb/public/lib/base/libpublic-lib-base.a |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/base/msgbus.cpp |72.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |72.7%| [AR] {RESULT} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/queues/std/queries.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/queues/std/queries.cpp |72.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |72.7%| [AR] {RESULT} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |72.7%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |72.7%| [AR] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |72.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |72.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/log_backend/log_backend_build.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/log_backend/log_backend_build.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__configure.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__configure.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_event_filter.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/wilson_tracing_control.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/wilson_tracing_control.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_event_filter.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_delete.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_delete.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/tablet_killer.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/tablet_killer.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/replica.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/replica.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_monitoring.cpp |72.8%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_monitoring.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/config_parser.cpp |72.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |72.8%| [AR] {RESULT} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config_parser.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |72.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/bridge.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/bridge.cpp |72.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |72.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |72.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |72.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |72.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |72.9%| [CC] {tool} $(B)/ydb/core/protos/config.pb.cc |72.9%| [AR] {tool} $(B)/ydb/core/protos/libydb-core-protos.a |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |72.9%| [LD] {tool} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |72.9%| [LD] {tool} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_proxy.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/modifications_validator.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/modifications_validator.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_proxy.cpp |72.9%| [LD] {tool} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |72.9%| [LD] {tool} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/audit_logins.cpp |72.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/grpc_services/audit_logins.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/pdisk_write.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_write.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/board_replica.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_replica.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/error.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/error.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/audit_log.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/audit_log.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/yql_single_query.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/logs/log.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/yql_single_query.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/logs/log.cpp |72.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |72.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |73.0%| [AR] {RESULT} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/log_backend/log_backend.cpp |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/log_backend/log_backend.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |73.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |73.0%| [AR] {RESULT} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |73.0%| 
[CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/keyvalue_write.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/keyvalue_write.cpp |73.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |73.0%| [AR] {RESULT} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_calls.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_calls.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |73.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |73.0%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/pdisk_log.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/http/types.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_log.cpp |73.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/types.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/action.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/action.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |73.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |73.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/events_writer.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/events_writer.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp |73.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |73.1%| [AR] {RESULT} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |73.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/error/error.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/error/error.cpp |73.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |73.1%| [AR] {RESULT} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |73.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/queue_attributes.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__migrate_state.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/queue_attributes.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__migrate_state.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/nbs2_load_actor.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/nbs2_load_actor.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/validators_ut.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/validators_ut.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/core/core_ydbc.cpp |73.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/core_ydbc.cpp |73.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |73.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |73.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/mind/node_broker__load_state.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__load_state.cpp |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |73.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__update_config.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/http/xml.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_config.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/xml.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp |73.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/compatibility/configs/dump/config-meta.json |73.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |73.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |73.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/library/compatibility/configs/dump/config-meta.json |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_utils.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_utils.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_exec_commit_mgr.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_exec_commit_mgr.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/wrappers/s3_storage_config.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/wrappers/s3_storage_config.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/core/core_ydb.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/core_ydb.cpp |73.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/core/libydb-mvp-core.a |73.2%| [AR] {RESULT} $(B)/ydb/mvp/core/libydb-mvp-core.a |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_txloglogic.cpp |73.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/core/libydb-mvp-core.a |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_txloglogic.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |73.2%| 
[CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp |73.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/ut/main.cpp |73.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |73.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/ut/main.cpp |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |73.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/helpers/ut/kqp_compile_cache_helpers_ut.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/discovery_actor.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/helpers/ut/kqp_compile_cache_helpers_ut.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/kqp.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/discovery_actor.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/kqp.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/meta/meta_versions.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/meta/meta_versions.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/init/init.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/http_service.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/http_service.cpp |73.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |73.3%| [AR] {RESULT} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |73.3%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |73.3%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |73.3%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |73.3%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/monitoring.cpp |73.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |73.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/monitoring.cpp |73.3%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |73.3%| [PY] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |73.3%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |73.3%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |73.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/generated/control_board_proto.h |73.4%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.h |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |73.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |73.4%| [AR] {RESULT} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |73.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp 
|73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |73.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |73.4%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |73.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |73.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |73.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |73.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/priorities/usage/config.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/usage/config.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |73.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/ymq/base/ut/counters_ut.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/s3_bench/main.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/s3_bench/main.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/service/service.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/service.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/actors/http/http_ut.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/actors/http/http_ut.cpp |73.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |73.5%| [AR] {RESULT} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/audit/audit_ut.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/audit/audit_ut.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ldap_auth_provider/test_utils/test_settings.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/test_utils/test_settings.cpp |73.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/test_utils/libsecurity-ldap_auth_provider-test_utils.a |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/test_utils/libsecurity-ldap_auth_provider-test_utils.a |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |73.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/compile_service.cpp |73.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/compile_service.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info.pb.cc |73.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info.pb.cc |73.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |73.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config.grpc.pb.cc |73.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |73.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.grpc.pb.cc |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |73.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/ut/ut_slicer.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/ut/ut_slicer.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/run_query.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/run_query.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon_alloc/stats.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/stats.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_helper.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_helper.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/filters_set.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/filters_set.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/proxy.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon_alloc/profiler.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/proxy.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/profiler.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/purecalc_filter.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/purecalc_filter.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |73.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |73.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/meta/meta.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/meta/meta.cpp |73.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |73.6%| 
[AR] {RESULT} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |73.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |73.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |73.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/defs.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/defs.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/control.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/control.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |73.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/actors/block_events.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/block_events.cpp |73.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/actors/wait_events.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/wait_events.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |73.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |73.7%| [AR] {RESULT} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/view/grpc_service.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/view/grpc_service.cpp |73.7%| [AR] 
{default-linux-x86_64, release, asan} $(B)/ydb/services/view/libydb-services-view.a |73.7%| [AR] {RESULT} $(B)/ydb/services/view/libydb-services-view.a |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/view/libydb-services-view.a |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/test_shard/grpc_service.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/test_shard/grpc_service.cpp |73.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |73.7%| [AR] {RESULT} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/auth/grpc_service.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/auth/grpc_service.cpp |73.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/auth/libydb-services-auth.a |73.7%| [AR] {RESULT} $(B)/ydb/services/auth/libydb-services-auth.a |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/bridge/grpc_service.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/bridge/grpc_service.cpp |73.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/bridge/libydb-services-bridge.a |73.7%| [AR] {RESULT} $(B)/ydb/services/bridge/libydb-services-bridge.a |73.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/bridge/libydb-services-bridge.a |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |73.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/tablet/ydb_tablet.cpp |73.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/tablet/ydb_tablet.cpp |73.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/tablet/libydb-services-tablet.a |73.8%| [AR] {RESULT} $(B)/ydb/services/tablet/libydb-services-tablet.a |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/replication/grpc_service.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/replication/grpc_service.cpp |73.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/replication/libydb-services-replication.a |73.8%| [AR] {RESULT} $(B)/ydb/services/replication/libydb-services-replication.a |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |73.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/config/grpc_service.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/grpc_service.cpp |73.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/libydb-services-config.a |73.8%| [AR] {RESULT} $(B)/ydb/services/config/libydb-services-config.a |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/config/grpc_service.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/grpc_service.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_object_storage.cpp |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/config/libydb-services-config.a |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_object_storage.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/local_discovery/grpc_service.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/local_discovery/grpc_service.cpp |73.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |73.8%| [AR] {RESULT} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/monitoring/grpc_service.cpp |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |73.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |73.8%| [AR] {RESULT} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/monitoring/grpc_service.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |73.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |73.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/discovery/grpc_service.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/discovery/grpc_service.cpp |73.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/discovery/libydb-services-discovery.a |73.8%| [AR] {RESULT} $(B)/ydb/services/discovery/libydb-services-discovery.a |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |73.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/init/init_ut.cpp |73.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init_ut.cpp |73.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_export.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_export.cpp |73.8%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/services/nbs/grpc_service.cpp |73.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/nbs/libydb-services-nbs.a |73.9%| [AR] {RESULT} $(B)/ydb/services/nbs/libydb-services-nbs.a |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/nbs/grpc_service.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_scripting.cpp |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/nbs/libydb-services-nbs.a |73.9%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/4d5e90a82c3d6e8c08a37d8154_raw.auxcpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scripting.cpp |73.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/4d5e90a82c3d6e8c08a37d8154_raw.auxcpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/rate_limiter/grpc_service.cpp |73.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |73.9%| [AR] {RESULT} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/rate_limiter/grpc_service.cpp |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_operation.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_operation.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/backup/grpc_service.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_scheme.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/fq/private_grpc.cpp |73.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/backup/libydb-services-backup.a |73.9%| [AR] {RESULT} $(B)/ydb/services/backup/libydb-services-backup.a |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scheme.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_import.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/backup/grpc_service.cpp |73.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_import.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/private_grpc.cpp |73.9%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/node_broker_workload |73.9%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/node_broker_workload |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/4d5e90a82c3d6e8c08a37d8154_raw.auxcpp |73.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/4d5e90a82c3d6e8c08a37d8154_raw.auxcpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_debug.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_debug.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |73.9%| [EN] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |73.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |73.9%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |73.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |73.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc.pb.cc |73.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.pb.cc |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |73.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console.pb.cc |74.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.pb.cc |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/cms/grpc_service.cpp |74.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/libydb-services-cms.a |74.0%| [AR] {RESULT} $(B)/ydb/services/cms/libydb-services-cms.a |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/cms/grpc_service.cpp |74.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/data_events/write_data.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/write_data.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/maintenance/grpc_service.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |74.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |74.0%| [AR] {RESULT} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/maintenance/grpc_service.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |74.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service_ut.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service_ut.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.cc |74.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.cc |74.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |74.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |74.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |74.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |74.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |74.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |74.0%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |74.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |74.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/56f374d81ad14f072a4df966aa_raw.auxcpp |74.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/56f374d81ad14f072a4df966aa_raw.auxcpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_failure.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_failure.cpp |74.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |74.1%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/c85d6dfb7100ccc30488c4ec9a_raw.auxcpp |74.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp |74.1%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/9b8f667efdecb908b3197117f5_raw.auxcpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |74.1%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c85d6dfb7100ccc30488c4ec9a_raw.auxcpp |74.1%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/9b8f667efdecb908b3197117f5_raw.auxcpp |74.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |74.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp |74.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |74.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |74.1%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/simple_queue |74.1%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/simple_queue |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/usage/service.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags.pb.cc |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/service.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags.pb.cc |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/c85d6dfb7100ccc30488c4ec9a_raw.auxcpp |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c85d6dfb7100ccc30488c4ec9a_raw.auxcpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/fq/ydb_over_fq.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/priorities/service/manager.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/service/manager.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ydb_over_fq.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/9b8f667efdecb908b3197117f5_raw.auxcpp |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/9b8f667efdecb908b3197117f5_raw.auxcpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.cc |74.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.cc |74.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/shutdown/controller.cpp |74.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/common/shutdown/controller.cpp |74.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |74.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/usage/config.cpp |74.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |74.1%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/usage/config.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |74.2%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |74.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/init.h_serialized.cpp |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |74.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |74.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/init.h_serialized.cpp |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |74.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/state/ut/ydb-library-yql-dq-state-ut |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |74.2%| [LD] {RESULT} $(B)/ydb/library/yql/dq/state/ut/ydb-library-yql-dq-state-ut |74.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_logstore.cpp |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_query.cpp |74.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/ff6190fab83f2fbb3c188b72b9_raw.auxcpp |74.2%| [PY] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |74.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/ff6190fab83f2fbb3c188b72b9_raw.auxcpp |74.2%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_logstore.cpp |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/version/version_definition.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_query.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/version/version_definition.cpp |74.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/apps/version/libversion_definition.a |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |74.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/apps/version/libversion_definition.a |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |74.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |74.2%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |74.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/state/ut/ydb-library-yql-dq-state-ut |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/service/manager.cpp |74.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/manager.cpp |74.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |74.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/35cd3c4b96eacfc5979f8a7b6c_raw.auxcpp |74.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/35cd3c4b96eacfc5979f8a7b6c_raw.auxcpp |74.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/dynamic_config/grpc_service.cpp |74.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |74.3%| [AR] {RESULT} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/dynamic_config/grpc_service.cpp |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |74.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/ff6190fab83f2fbb3c188b72b9_raw.auxcpp |74.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/ff6190fab83f2fbb3c188b72b9_raw.auxcpp |74.3%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/7a8f263baf9436f1a5b95d265f_raw.auxcpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |74.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/7a8f263baf9436f1a5b95d265f_raw.auxcpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |74.3%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |74.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |74.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc.grpc.pb.cc |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.grpc.pb.cc |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |74.3%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |74.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |74.3%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |74.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |74.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/35cd3c4b96eacfc5979f8a7b6c_raw.auxcpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/35cd3c4b96eacfc5979f8a7b6c_raw.auxcpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/7a8f263baf9436f1a5b95d265f_raw.auxcpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/7a8f263baf9436f1a5b95d265f_raw.auxcpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/request/config.cpp |74.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/config.cpp |74.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |74.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/config.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/config.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/audit/audit_log_impl.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_log_impl.cpp |74.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |74.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |74.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/audit/libydb-core-audit.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |74.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |74.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |74.4%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |74.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/742f1d972307bb9d01a727deea_raw.auxcpp |74.4%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/742f1d972307bb9d01a727deea_raw.auxcpp |74.4%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |74.4%| [EN] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console.grpc.pb.cc |74.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.grpc.pb.cc |74.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/runtime_feature_flags.h |74.4%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |74.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |74.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/usage/service.cpp |74.4%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/usage/service.cpp |74.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/init.h_serialized.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/init.h_serialized.cpp |74.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/libcore-config-init.a |74.4%| [AR] {RESULT} $(B)/ydb/core/config/init/libcore-config-init.a |74.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp |74.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/events/data_plane.cpp |74.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |74.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/events/data_plane.cpp |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |74.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |74.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |74.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/formats/arrow/serializer/native.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/serializer/native.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/service/counters.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portion_interval_tree.cpp |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portion_interval_tree.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/counters.cpp |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/program/execution.cpp |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/common/service.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/program/execution.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/common/service.cpp |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |74.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |74.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |74.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/priorities/service/service.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/service/service.cpp |74.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/fq/grpc_service.cpp |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |74.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |74.5%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/grpc_service.cpp |74.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/libydb-services-fq.a |74.6%| [AR] {RESULT} $(B)/ydb/services/fq/libydb-services-fq.a |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |74.6%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/service/service.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/service/service.cpp |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |74.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config.grpc.pb.cc |74.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.grpc.pb.cc |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |74.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config.pb.cc |74.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.pb.cc |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |74.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |74.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |74.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |74.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |74.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus.pb.cc |74.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.pb.cc |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_cache.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_cache.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/snapshot.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/snapshot.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_http_server.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_http_server.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/vdisk_write.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/vdisk_write.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/pdisk_read.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_read.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |74.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/service_actor.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/service_actor.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |74.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |74.8%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |74.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_types.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_types.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |74.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |74.8%| [AR] {RESULT} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |74.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/events/events.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/events/events.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/mon.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/mon.cpp |74.8%| [AR] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/mon/libydb-core-mon.a |74.8%| [AR] {RESULT} $(B)/ydb/core/mon/libydb-core-mon.a |74.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |74.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |74.8%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |74.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |74.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__start_tablet.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__start_tablet.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |74.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |74.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |74.9%| [AR] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |74.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config.pb.cc |74.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.pb.cc |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_console.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_console.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/group_write.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/group_write.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__create_tenant.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__create_tenant.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/monitoring.cpp |75.0%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/monitoring.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__status.cpp |75.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__status.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/initializer.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_pool.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/initializer.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_pool.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/access.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/access.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/behaviour.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/behaviour.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/cms/cms_tx_load_state.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_load_state.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tablet_info.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tablet_info.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/audit/audit.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/audit/audit.cpp |75.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |75.1%| [AR] {RESULT} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |75.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/storage_group_info.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_group_info.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon_alloc/monitor.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/monitor.cpp |75.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |75.2%| [AR] {RESULT} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/factories.cpp |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/factories.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |75.2%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/erasure_checkers.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/erasure_checkers.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_api_handler.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_api_handler.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_update_config.cpp |75.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |75.2%| [AR] {RESULT} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_update_config.cpp |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/events/internal.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/events/internal.cpp |75.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |75.2%| [AR] {RESULT} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |75.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/downtime.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/downtime.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |75.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |75.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |75.3%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |75.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_full_text_source.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_full_text_source.cpp |75.3%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/metering_sink.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/metering_sink.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_buffer_lookup_actor.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_buffer_lookup_actor.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/main.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/main.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/object.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/object.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |75.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |75.3%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/fetcher.cpp |75.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/fetcher.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/node_info.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/node_info.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_proxy/local_pq_client/local_federated_topic_client.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_proxy/local_pq_client/local_federated_topic_client.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |75.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |75.3%| [AR] {RESULT} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |75.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |75.3%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/local_pgwire/local_pgwire.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |75.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |75.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/reassign_actor.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/reassign_actor.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/common.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/common.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ddisk_write.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ddisk_write.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__set_down.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__set_down.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/lease_holder.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/lease_holder.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/fix_transaction_states.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/fix_transaction_states.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/persqueue/pqrb/read_balancer__metrics.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__metrics.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/direct_block_group.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/direct_block_group.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/storage_balancer.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_balancer.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/storage_transport/ic_storage_transport.cpp |75.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/storage_transport/ic_storage_transport.cpp |75.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/storage_transport/liblibs-storage-storage_transport.a |75.4%| [AR] {RESULT} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/storage_transport/liblibs-storage-storage_transport.a |75.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/storage_transport/liblibs-storage-storage_transport.a |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pq.cpp |75.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pq.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |75.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |75.5%| [AR] {RESULT} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/common.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/common.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer__mlp_balancing.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__mlp_balancing.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/service.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/service.cpp |75.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/libydb-services-metadata.a |75.5%| [AR] {RESULT} $(B)/ydb/services/metadata/libydb-services-metadata.a |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |75.5%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/mind/labels_maintainer.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/labels_maintainer.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__register_node.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__register_node.cpp |75.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |75.5%| [AR] {RESULT} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/local.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/local.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/memory_controller/memory_controller.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |75.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |75.5%| [AR] {RESULT} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/memory_controller/memory_controller.cpp |75.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/keyvalue/grpc_service_v2.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service_v2.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_column_statistics_utils.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_column_statistics_utils.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |75.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |75.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/manager.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/manager.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |75.6%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_ddisk.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_ddisk.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/run.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/run.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |75.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |75.6%| [AR] {RESULT} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |75.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/get_group.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/get_group.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/node_report.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/bridge.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/node_report.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/bridge.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |75.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |75.6%| [AR] {RESULT} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |75.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |75.6%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/grouper.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/grouper.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |75.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |75.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/ddisk.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ddisk.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/grpc_server.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/grpc_server.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__register_node.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__register_node.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/register_node.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/register_node.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/signals/owner.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/signals/owner.cpp |75.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/signals/libydb-library-signals.a |75.7%| [AR] {RESULT} $(B)/ydb/library/signals/libydb-library-signals.a |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/migrate.cpp |75.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/bscontroller/migrate.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/bsc.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/bsc.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/kesus/grpc_service.cpp |75.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/kesus/libydb-services-kesus.a |75.7%| [AR] {RESULT} $(B)/ydb/services/kesus/libydb-services-kesus.a |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/kesus/grpc_service.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/boot_queue.cpp |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/boot_queue.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config.cpp |75.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |75.7%| [AR] {RESULT} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |75.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |75.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/dynamic_nameserver.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |75.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |75.8%| [AR] {RESULT} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/dynamic_nameserver.cpp |75.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/shred.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/shred.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |75.8%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/scrub.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/scrub.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |75.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/libydb-core-mind.a |75.8%| [AR] {RESULT} $(B)/ydb/core/mind/libydb-core-mind.a |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ymq/ymq_proxy.cpp |75.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ymq/ymq_proxy.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/domain_info.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/domain_info.cpp |75.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/librun.a |75.8%| [AR] {RESULT} $(B)/ydb/core/driver_lib/run/librun.a |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive.cpp |75.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/run/librun.a |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/fill.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/fill.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/balancer.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/balancer.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/drain.cpp |75.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/drain.cpp |75.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/common/actor.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/common/actor.cpp |75.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |75.9%| [AR] {RESULT} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |75.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/query_actor/query_actor.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |75.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |75.9%| [AR] {RESULT} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/query_actor/query_actor.cpp |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/config.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_statics.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_statics.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_domains.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_domains.cpp |75.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |75.9%| [AR] {RESULT} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_log.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_log.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |75.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/table_creator/table_creator.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/table_creator/table_creator.cpp |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |76.0%| [AR] {RESULT} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__process_metrics.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_metrics.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ymq/grpc_service.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/message_id_deduplicator.cpp |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ymq/libydb-services-ymq.a |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/message_id_deduplicator.cpp |76.0%| [AR] {RESULT} $(B)/ydb/services/ymq/libydb-services-ymq.a |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ymq/grpc_service.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/consumer_offset_tracker.cpp |76.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/persqueue/pqtablet/partition/consumer_offset_tracker.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/keyvalue/grpc_service_v1.cpp |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |76.0%| [AR] {RESULT} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service_v1.cpp |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/follower_edge.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/follower_edge.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execution_unit.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execution_unit.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |76.1%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/import_s3.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/import_s3.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |76.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |76.1%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_collector.cpp |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |76.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |76.1%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |76.1%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_impl.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_impl.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |76.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |76.2%| [AR] {RESULT} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_mlp.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_mlp.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/backup_unit.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/backup_unit.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_client_helpers.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_client_helpers.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/memory.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/memory.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/load_actor_adapter.cpp |76.2%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/load_actor_adapter.cpp |76.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/libydb-core-load_test.a |76.2%| [AR] {RESULT} $(B)/ydb/core/load_test/libydb-core-load_test.a |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_client.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_client.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |76.3%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/fast_path_service.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/fast_path_service.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |76.3%| [AR] {RESULT} 
$(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct_actor.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct_actor.cpp |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/fulltext_dict.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/fulltext_dict.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/direct_block_group_in_mem.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/direct_block_group_in_mem.cpp |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/liblibs-storage-partition_direct.a |76.3%| [AR] {RESULT} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/liblibs-storage-partition_direct.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/liblibs-storage-partition_direct.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |76.3%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |76.3%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |76.3%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_query_text_cache_service.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_query_text_cache_service.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_read_session.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_read_session.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/node_service/kqp_node_state.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/node_service/kqp_node_state.cpp |76.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |76.4%| [AR] {RESULT} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_write_session.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_proxy/local_pq_client/local_topic_write_session.cpp |76.5%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/local_proxy/local_pq_client/libcore-local_proxy-local_pq_client.a |76.5%| [AR] {RESULT} $(B)/ydb/core/local_proxy/local_pq_client/libcore-local_proxy-local_pq_client.a |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/local_proxy/local_pq_client/libcore-local_proxy-local_pq_client.a |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__init.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__init.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_transform.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_transform.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |76.5%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_cdc_stream_common.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_cdc_stream_common.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |76.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |76.6%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |76.6%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/space_monitor.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/space_monitor.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3_upload.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_upload.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_translate.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_translate.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/op_load.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_load.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |76.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |76.7%| [AR] {RESULT} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |76.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |76.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_runner.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_runner.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |76.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__write.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__write.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/datastreams/put_records_actor.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/put_records_actor.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |76.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |76.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3_write.cpp |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_write.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/op_init_schema.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_init_schema.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/given_id_range.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/given_id_range.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |76.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |76.8%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/export_scan.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_scan.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/actors/ut_service/common/helpers.cpp 
|76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/actors/ut_service/common/helpers.cpp |76.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/actors/ut_service/common/libactors-ut_service-common.a |76.8%| [AR] {RESULT} $(B)/ydb/core/kqp/federated_query/actors/ut_service/common/libactors-ut_service-common.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/federated_query/actors/ut_service/common/libactors-ut_service-common.a |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/export_common.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_common.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/mon_main.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/mon_main.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_trash.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_trash.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3_delete.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_delete.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |76.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |76.9%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |76.9%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_mon.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_mon.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_uncertain.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_uncertain.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3_scan.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_scan.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/key_validator.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/key_validator.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_nbs_io.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_nbs_io.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |77.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |77.0%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/ut/common/common.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/ut/common/common.cpp |77.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |77.0%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_resolve.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_resolve.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/upload_stats.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/upload_stats.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx.cpp |77.0%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/libydb-core-tx.a |77.0%| [AR] {RESULT} $(B)/ydb/core/tx/libydb-core-tx.a |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/operation.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/operation.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/garbage_collection.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/garbage_collection.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/coro_tx.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/coro_tx.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/replication/service/s3_writer.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/s3_writer.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/shard_impl.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/shard_impl.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/counters/counters.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/counters/counters.cpp |77.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |77.1%| [AR] {RESULT} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/service.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/service.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/remove_locks.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_locks.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/blob_depot.cpp |77.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blob_depot/blob_depot.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/restore_unit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/restore_unit.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_load.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_load.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |77.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |77.2%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |77.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_decommit.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/worker.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_decommit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/worker.cpp |77.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |77.2%| [AR] {RESULT} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |77.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp 
|77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/assimilator.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/logging.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/logging.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/assimilator.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/truncate_unit.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/truncate_unit.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_gc.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_gc.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/controller.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/controller.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/table_settings.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/table_settings.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/blocks.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/blocks.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |77.3%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/event.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/event.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_view.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_view.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_replica.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_replica.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |77.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |77.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/op_apply_config.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_apply_config.cpp |77.4%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/tx/replication/controller/tx_resolve_database_result.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_database_result.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_test_shard.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_test_shard.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_replication.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_replication.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |77.4%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/board_publish.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_publish.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_nbs.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_nbs.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_ping.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_ping.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |77.5%| 
[CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_import.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_import.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/datastreams/grpc_service.cpp |77.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |77.5%| [AR] {RESULT} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/grpc_service.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |77.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_export.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |77.7%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_export.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/appdata.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/appdata.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |77.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |77.8%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |77.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_fq.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_fq.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/operation_helpers.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/operation_helpers.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |78.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_config.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_config.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |78.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |78.0%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |78.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |78.1%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_backup.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_backup.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |78.1%| 
[CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |78.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/snapshot.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/snapshot.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |78.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |78.2%| [AR] {RESULT} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/manager.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/manager.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |78.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |78.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |78.3%| [AR] {RESULT} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |78.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |78.3%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_cms.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cms.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_startup.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_startup.cpp |78.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |78.3%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_truncate_table.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_truncate_table.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blob_depot/agent/storage_range.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |78.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |78.4%| [AR] {RESULT} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/ydb/query_actor.cpp |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/query_actor.cpp |78.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |78.4%| [AR] {RESULT} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_login.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_login.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/fetcher.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/fetcher.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/checker_secret.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/checker_secret.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/initializer.cpp 
|78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/initializer.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/health/health.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/health/health.cpp |78.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |78.5%| [AR] {RESULT} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |78.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/request.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/request.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/local_leader_election.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/local_leader_election.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ddisk.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ddisk.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |78.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |78.5%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |78.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_cluster_state.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cluster_state.cpp |78.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |78.5%| [AR] {RESULT} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |78.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/comm.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/comm.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/init/init.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/init/init.cpp |78.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |78.6%| [AR] {RESULT} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |78.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_scheme_builders.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_scheme_builders.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/hash_slider.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_slider.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction__progress.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction__progress.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/random.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/random.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |78.6%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/segment_manager.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/segment_manager.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |78.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |78.6%| [AR] {RESULT} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |78.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_persistent_buffer.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_persistent_buffer.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |78.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |78.7%| [AR] {RESULT} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/secret.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/secret.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction__create.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_forced_compaction__create.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/fetcher.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/fetcher.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/checker_access.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/checker_access.cpp |78.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |78.7%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/sharding.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/sharding.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/backup.cpp |78.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |78.7%| [AR] {RESULT} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/backup.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_read_write.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_read_write.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/load_test.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/load_test.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgreader.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgreader.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/locks/locks.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |78.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |78.8%| [AR] {RESULT} 
$(B)/ydb/core/tx/locks/libcore-tx-locks.a |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/locks/locks.cpp |78.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_sync.cpp |78.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |78.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_sync.cpp |78.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/sqs_topic_proxy.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/sqs_topic_proxy.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/populator.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/populator.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/utils.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/utils.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |78.8%| [AR] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |78.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |78.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |78.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |78.8%| [AR] {RESULT} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |78.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/execute_queue.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/execute_queue.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator__init.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__init.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |78.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |78.9%| [AR] {RESULT} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |78.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |78.9%| [AR] {RESULT} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |78.9%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_chunks.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_chunks.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/manager.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/manager.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |78.9%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/blocks.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/blocks.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_boot.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_boot.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |78.9%| [AR] {RESULT} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |78.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/garbage.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/garbage.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/config_helpers.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/config_helpers.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/agent.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/agent.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_connect.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/metrics.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ddisk_actor_connect.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/metrics.cpp |79.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ddisk/libcore-blobstorage-ddisk.a |79.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ddisk/libcore-blobstorage-ddisk.a |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ddisk/libcore-blobstorage-ddisk.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/cache.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/cache.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/sasl/plain_auth_actor.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/sasl/plain_auth_actor.cpp |79.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.0%| [AR] {RESULT} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |79.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |79.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |79.0%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/create_queue.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/create_queue.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |79.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |79.0%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/sasl/base_auth_actors.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/sasl/base_auth_actors.cpp |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/sasl/scram_auth_actor.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/filter_kmeans.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/sasl/scram_auth_actor.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/filter_kmeans.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/service.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/logger.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/logger.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_handshake.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_handshake.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/proxy.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/proxy.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/database/database.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/database/database.cpp |79.1%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/statistics/database/libcore-statistics-database.a |79.1%| [AR] {RESULT} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_response.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_response.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_metrics.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_metrics.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_configs_provider.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_provider.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/query.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/query.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/read.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/read.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/status.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/status.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |79.1%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/change_message_visibility.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/change_message_visibility.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/service/ext_counters.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/service/ext_counters.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_delivery_problem.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_delivery_problem.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/service_impl.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/node_checkers.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service_impl.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/node_checkers.cpp |79.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |79.2%| [AR] {RESULT} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/actor.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/actor.cpp |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_request.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_request.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/permissions.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/permissions.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/groups.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/sys_view/auth/owners.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/groups.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/owners.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/s3.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/s3.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_configs_manager.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_manager.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/users.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/users.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/nodes/nodes.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/nodes/nodes.cpp |79.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |79.3%| [AR] {RESULT} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |79.3%| 
[CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__set_config.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__set_config.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/control/immediate_control_board_actor.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/immediate_control_board_actor.cpp |79.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/libydb-core-control.a |79.3%| [AR] {RESULT} $(B)/ydb/core/control/libydb-core-control.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/libydb-core-control.a |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/group_members.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/group_members.cpp |79.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |79.3%| [AR] {RESULT} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_init.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/bootstrapper.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_init.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/bootstrapper.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_metrics.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/service/sysview_service.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_metrics.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/service/sysview_service.cpp |79.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |79.3%| [AR] {RESULT} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_reset.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_reset.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/node_whiteboard.cpp |79.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tablet/node_whiteboard.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/resource_broker.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/resource_broker.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/board_lookup.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_lookup.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/sentinel.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/processor.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/processor.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_guardian.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/logger.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_guardian.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/logger.cpp |79.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/libydb-core-base.a |79.4%| [AR] {RESULT} $(B)/ydb/core/base/libydb-core-base.a |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_sys.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/base/libydb-core-base.a |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_sys.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_resolver.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_resolver.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/external_sources/object_storage.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/object_storage.cpp |79.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |79.4%| [AR] {RESULT} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/discovery/discovery.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/discovery/discovery.cpp |79.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/discovery/libydb-core-discovery.a |79.4%| [AR] {RESULT} $(B)/ydb/core/discovery/libydb-core-discovery.a |79.4%| 
[AR] {BAZEL_UPLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/testing.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/testing.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/info_collector.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/info_collector.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/sasl/plain_ldap_auth_proxy_actor.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/sasl/plain_ldap_auth_proxy_actor.cpp |79.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/sasl/libcore-security-sasl.a |79.4%| [AR] {RESULT} $(B)/ydb/core/security/sasl/libcore-security-sasl.a |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/sasl/libcore-security-sasl.a |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/pdisks.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/pdisks.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |79.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/libydb-core-tablet.a |79.4%| [AR] {RESULT} $(B)/ydb/core/tablet/libydb-core-tablet.a |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/db_counters.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/db_counters.cpp |79.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |79.4%| [AR] {RESULT} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |79.5%| [AR] {RESULT} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |79.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |79.5%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/config.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/config.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/analyze_actor.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/analyze_actor.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/vslots.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/vslots.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |79.5%| [AR] {RESULT} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/utils.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/utils.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |79.5%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/login_page.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/login_page.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/groups.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/groups.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |79.5%| [AR] {RESULT} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ticket_parser.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ticket_parser.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/libydb-core-security.a |79.5%| [AR] {RESULT} $(B)/ydb/core/security/libydb-core-security.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/libydb-core-security.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/scan.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/tablets/tablets.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/http.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |79.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |79.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |79.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/scan.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/tablets/tablets.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/http.cpp |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |79.6%| [AR] {RESULT} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |79.6%| [AR] 
{RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |79.6%| [AR] {RESULT} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |79.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |79.6%| [AR] {RESULT} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/streaming_queries/streaming_queries.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/set_queue_attributes.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |79.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/streaming_queries/streaming_queries.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/set_queue_attributes.cpp |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |79.7%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/quoter/quoter_service.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/purge_queue.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/purge_queue.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |79.7%| [AR] {RESULT} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/libydb-core-quoter.a |79.7%| [AR] {RESULT} $(B)/ydb/core/quoter/libydb-core-quoter.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |79.7%| [AR] {RESULT} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/get_queue_attributes.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/get_queue_attributes.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |79.7%| [AR] {RESULT} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |79.7%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a 
|79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/quota/read_quoter.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/delete_message.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/read_quoter.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/delete_message.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |79.7%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/public_http/http_service.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/public_http/http_service.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/libydb-core-public_http.a |79.8%| [AR] {RESULT} $(B)/ydb/core/public_http/libydb-core-public_http.a |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_writer.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_writer.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/writer/writer.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/writer.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |79.8%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_purger.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_purger.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/topic.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/topic.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/persqueue/public/mlp/mlp_changer.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |79.8%| [AR] {RESULT} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_reader.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |79.8%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_common.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_common.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_app.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_app.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_message_enricher.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_message_enricher.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |79.8%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_store_first_boot_timestamp.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |79.8%| [AR] {RESULT} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_first_boot_timestamp.cpp |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/list_queues.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/list_queues.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cluster_info.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cluster_info.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/quota/account_read_quoter.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/account_read_quoter.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/quota/quoter_base.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/quoter_base.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |79.9%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage__serialization.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage__serialization.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |79.9%| [AR] {RESULT} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |79.9%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/receive_message.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/receive_message.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |79.9%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |79.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/libcore-client-server.a |79.9%| [AR] {RESULT} $(B)/ydb/core/client/server/libcore-client-server.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/api_adapters.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/api_adapters.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__load_state.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__load_state.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/send_message.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/send_message.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__init_scheme.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__init_scheme.cpp |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |80.0%| [AR] {RESULT} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |80.0%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/auth_factory.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/auth_factory.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/proxy/proxy.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/proxy.cpp |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |80.0%| [AR] {RESULT} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/libydb-core-cms.a |80.0%| [AR] {RESULT} $(B)/ydb/core/cms/libydb-core-cms.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_utils.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_utils.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/auth_actors.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/auth_actors.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |80.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/cms/console/configs_dispatcher.cpp |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/libcore-cms-console.a |80.0%| [AR] {RESULT} $(B)/ydb/core/cms/console/libcore-cms-console.a |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |80.0%| [AR] {RESULT} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |80.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/ydbd/main.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |80.1%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/ydbd/main.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_host.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_host.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |80.1%| [AR] {RESULT} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/basics/runtime.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/runtime.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |80.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |80.2%| [AR] {RESULT} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/tx_helpers.cpp |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tx_helpers.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_proxy.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_proxy.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/executor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |80.2%| [AR] {RESULT} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/executor.cpp |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/health_check/health_check.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/libydb-core-health_check.a |80.2%| [AR] {RESULT} $(B)/ydb/core/health_check/libydb-core-health_check.a |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/health_check/health_check.cpp |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_batch_operations.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_batch_operations.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/retention.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/retention.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp 
|80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/column_families.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/column_families.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |80.2%| [AR] {RESULT} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/table_profiles.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_profiles.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/table_settings.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_settings.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service_ut.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/node_tracker.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/node_tracker.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |80.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |80.3%| [AR] {RESULT} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/count_queues.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/count_queues.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |80.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |80.3%| [AR] {RESULT} 
$(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/schema.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/schema.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_tx.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/table_description.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_description.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/actors/ut_service/fast/kqp_federated_query_actors_ut.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/actors/ut_service/fast/kqp_federated_query_actors_ut.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/describe.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/send_message.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/describe.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/send_message.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/metadata/blobstorage_pdisk_metadata.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/metadata/blobstorage_pdisk_metadata.cpp |80.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |80.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/untag_queue.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/untag_queue.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/basics/appdata.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/appdata.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/queue_leader.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queue_leader.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/actor.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/queue_schema.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/actor.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queue_schema.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/basics/helpers.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/helpers.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/service.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/service.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_permissions.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_permissions.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_group/main.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_group/main.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/util/memory_tracker.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/memory_tracker.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_table.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/test_tablet/load_actor_mon.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/tag_queue.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/tag_queue.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/util/failure_injection.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/failure_injection.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |80.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/libydb-core-util.a |80.5%| [AR] {RESULT} $(B)/ydb/core/util/libydb-core-util.a |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/util/libydb-core-util.a |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/ut_helpers.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |80.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/delete_queue.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_queue.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/viewer.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/change_visibility.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/change_visibility.cpp |80.5%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_queues.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_queues.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |80.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |80.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/ut_truncate/datashard_ut_truncate.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_truncate/datashard_ut_truncate.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |80.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/common_helper.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/common_helper.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/basics/services.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/services.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |80.6%| [AR] {RESULT} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |80.6%| [AR] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |80.6%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter_ut.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter_ut.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |80.7%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |80.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/http_req.cpp |80.7%| [AR] {RESULT} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |80.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |80.7%| [AR] {RESULT} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/http_req.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_purger_ut.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_purger_ut.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |80.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |80.8%| [AR] {RESULT} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |80.8%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_ut.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_ut.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tracing/tablet_info.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tracing/tablet_info.cpp |80.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tracing/libydb-core-tracing.a |80.8%| [AR] {RESULT} $(B)/ydb/core/tracing/libydb-core-tracing.a |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_ut_common.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |80.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |80.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/wrappers/fs_storage.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/wrappers/fs_storage.cpp |80.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |80.8%| [AR] {RESULT} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |80.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |80.9%| [AR] {RESULT} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |80.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |80.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |80.9%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |80.9%| [AR] {RESULT} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |81.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_dummy.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_dummy.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |81.0%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/libydb-services-ydb.a |81.0%| [AR] {RESULT} $(B)/ydb/services/ydb/libydb-services-ydb.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |81.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/background_controller.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/background_controller.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |81.0%| [AR] {RESULT} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |81.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |81.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/columnshard__tx_abort.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__tx_abort.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |81.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |81.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |81.1%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |81.2%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_truncate_table_simple/ut_truncate_table_simple.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_truncate_table_simple/ut_truncate_table_simple.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |81.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |81.2%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |81.2%| [AR] {RESULT} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |81.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a 
|81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |81.3%| [AR] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_table.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_table.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |81.3%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |81.3%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/counters.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/counters.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |81.3%| [AR] {RESULT} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/actors/kqp_federated_query_actors.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/actors/kqp_federated_query_actors.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/actors/libkqp-federated_query-actors.a |81.3%| [AR] {RESULT} $(B)/ydb/core/kqp/federated_query/actors/libkqp-federated_query-actors.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/federated_query/actors/libkqp-federated_query-actors.a |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |81.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |81.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |81.4%| [AR] {RESULT} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/http/http.cpp |81.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |81.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/http.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |81.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |81.4%| [AR] {RESULT} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |81.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |81.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/shard_key_ranges.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/shard_key_ranges.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |81.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |81.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/test_shard_context.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_shard_context.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/scheme.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/scheme.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_state.cpp |81.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/test_tablet/load_actor_state.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_write.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_write.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/state_server_interface.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/state_server_interface.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/tx_initialize.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_initialize.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/time_cast/time_cast.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/time_cast/time_cast.cpp |81.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |81.5%| [AR] {RESULT} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/row_table.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/row_table.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/purecalc_output.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc_output.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/column_table.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/column_table.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_pipe_req.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_pipe_req.cpp |81.5%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/transfer/purecalc.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/purecalc_input.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_query.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc_input.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_query.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/fake_coordinator.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/fake_coordinator.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/viewer_request.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_request.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/test_tablet.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_tablet.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/transfer_writer.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/transfer_writer.cpp |81.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/transfer/libydb-core-transfer.a |81.6%| [AR] {RESULT} $(B)/ydb/core/transfer/libydb-core-transfer.a |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |81.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |81.6%| [AR] {RESULT} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_browse.cpp |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_browse.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |81.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |81.6%| [AR] {RESULT} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/viewer/json_handlers_pq.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_pq.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/metering.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/metering.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/topic_description.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/topic_description.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/viewer_topic_data.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_topic_data.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |81.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |81.6%| [AR] {RESULT} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_operation.cpp |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_operation.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |81.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |81.7%| [AR] {RESULT} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/auth_factory.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/auth_factory.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/tablet_helpers.cpp |81.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/testlib/tablet_helpers.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/write_actor.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/write_actor.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |81.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |81.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/external_table_description.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/external_table_description.cpp |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |81.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |81.7%| [AR] {RESULT} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/purge.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/purge.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |81.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |81.7%| [AR] {RESULT} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |81.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |81.7%| [AR] {RESULT} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |81.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |81.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |81.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_users.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_users.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_wb_req.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_wb_req.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/actors/test_runtime.cpp |81.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |81.8%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/test_runtime.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |81.8%| [AR] {RESULT} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/tenant_runtime.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |81.8%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tenant_runtime.cpp |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |81.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |81.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/receive_message.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/receive_message.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |81.8%| [AR] {RESULT} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/proxy_service.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/proxy_service.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |81.8%| [AR] {RESULT} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/purge_queue.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/purge_queue.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |81.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kesus/tablet/tx_init.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_init.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |81.8%| [AR] {RESULT} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/cs_helper.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/cs_helper.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/blobsan/main.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/blobsan/main.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/delete_message.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_message.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |81.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |81.9%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |81.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/create_user.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/create_user.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/delete_user.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_user.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/create_queue.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/create_queue.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp 
|81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay/query_proccessor.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/cancellation.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cancellation.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_large.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/test_client.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/test_client.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |82.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/libydb-core-testlib.a |82.0%| [AR] {RESULT} $(B)/ydb/core/testlib/libydb-core-testlib.a |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/local_leader_election_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/local_leader_election_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |82.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay/query_compiler.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |82.1%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_state_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_state_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_truncate_table_reboots/ut_truncate_table_reboots.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_truncate_table_reboots/ut_truncate_table_reboots.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |82.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/libydb-core-viewer.a |82.1%| [AR] {RESULT} $(B)/ydb/core/viewer/libydb-core-viewer.a |82.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/types/bool_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/types/bool_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/core/mvp_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/mvp_ut.cpp |82.1%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/driver_lib/run/run_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/run_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/ut/state_storage_follower_ids_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/ut/state_storage_follower_ids_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/retro_tracing.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/retro_tracing.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_ut_pool.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_ut_common.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/locks_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/locks_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_whoami_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/ydb/ydb_whoami_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/persistent_buffer_space_allocator.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/persistent_buffer_space_allocator.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/config/bsconfig_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/partition_direct_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |82.4%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/sys_view/ut_labeled.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tenants_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tenants_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/s3_writer_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/s3_writer_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |82.4%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/reading/sparsed_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/reading/sparsed_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |82.5%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_table_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/simple_blocking_write_session_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/simple_blocking_write_session_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |82.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/types/decimal_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/types/decimal_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_ut_configs.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_type_info_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_type_info_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp 
|82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/operations/delete_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/operations/delete_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/operations/write_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/operations/write_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |82.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/ydb/ydb_coordination_ut.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/ut/graph_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cluster_info_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_copy_table.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_copy_table.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_verify_failure_regression.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_verify_failure_regression.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk2/compaction.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/compaction.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |82.8%| 
[CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/vector/kqp_indexes_vector_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/vector/kqp_indexes_vector_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_auth.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_auth.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_borrowed_compaction.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_borrowed_compaction.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk_io/vdisk_io.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/ut_vdisk_io/vdisk_io.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/storage/compaction_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/storage/compaction_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_pea_ut.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_pea_ut.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/board_subscriber_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |83.0%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/storage/compression_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/storage/compression_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/services/fq/ut_integration/fq_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/query_actor/query_actor_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp 
|83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_ut_common.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/mon_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/mon_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ut_ycsb.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |83.2%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/statistics/kqp_olap_stats_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/statistics/kqp_olap_stats_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/statistics/statistics_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/statistics/statistics_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/datastreams/datastreams_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp 
|83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tenants_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ut/segment_manager_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ut/segment_manager_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ddisk/ut/ddisk_actor_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/viewer_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ddisk/ut/ddisk_actor_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/actors/ut_service/slow/kqp_federated_query_actors_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/actors/ut_service/slow/kqp_federated_query_actors_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_common.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp 
|83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/sentinel_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/ut/consumer_offset_tracker_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ut/consumer_offset_tracker_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/prefixed_vector/kqp_indexes_prefixed_vector_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/cancel_tx_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/prefixed_vector/kqp_indexes_prefixed_vector_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ut_common.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ut_common.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/audit_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/audit_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_query_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_login_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/object_storage_listing_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_import_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/flat_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/flat_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/ddisk.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ddisk.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |83.6%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay/main.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/main.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ticket_parser_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_counters.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_common.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_auth.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_auth.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay_yt/main.cpp |83.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/tools/query_replay_yt/main.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/cms/cms_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/cms/cms_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/table_creator/table_creator_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay/query_replay.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |83.7%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_kqp.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/resource_broker_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_ut_local.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/ut_common.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/ut_common.cpp |83.8%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |83.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/ut_helpers.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |83.8%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/health_check/health_check_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |83.8%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |83.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/types/datatime64_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/types/datatime64_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |83.8%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |83.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/downtime_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |83.9%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |83.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/types/json_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/types/json_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |83.9%| 
[CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |83.9%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |83.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |83.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |83.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |83.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |83.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |83.9%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |83.9%| [EN] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/topic_data_ut.cpp |83.9%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |83.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/remove_identity_map.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/remove_identity_map.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |83.9%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/48e3e82568a27690f850e09bd0_raw.auxcpp |83.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/48e3e82568a27690f850e09bd0_raw.auxcpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |83.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_tli.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_tli.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/reading/simple_reader_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/reading/simple_reader_ut.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |84.0%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |84.0%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/ctas_workload |84.0%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ctas_workload |84.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/48e3e82568a27690f850e09bd0_raw.auxcpp |84.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/48e3e82568a27690f850e09bd0_raw.auxcpp |84.0%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |84.0%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/test_server.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/test_server.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |84.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |84.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |84.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |84.0%| [CC] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |84.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/libcore-base-generated.a |84.0%| [AR] {RESULT} $(B)/ydb/core/base/generated/libcore-base-generated.a |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |84.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/indexes/indexes_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/indexes/indexes_ut.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_state.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_state.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |84.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |84.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |84.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |84.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_utils.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_utils.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |84.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |84.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |84.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |84.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/pq_ut.cpp 
|84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |84.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_expression.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_expression.cpp |84.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/activation.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/activation.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_compute_statistics.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_compute_statistics.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |84.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |84.1%| [EN] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/alter_impl.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/alter_impl.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |84.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |84.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/tier/object.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/tier/object.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |84.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/push_limit_into_sort.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/push_limit_into_sort.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp |84.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |84.2%| [EN] {BAZEL_UPLOAD} 
$(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/object.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/object.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/priorities/usage/service.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/usage/service.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/fulltext/kqp_indexes_fulltext_ut.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/fulltext/kqp_indexes_fulltext_ut.cpp |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |84.3%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |84.3%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |84.3%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/push_map.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/push_map.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_convertion_utils.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_convertion_utils.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |84.3%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_convert_to_physical.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_convert_to_physical.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_snapshot.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_snapshot.cpp |84.4%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_type_ann.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_type_ann.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/diagnostics/scan_diagnostics_actor.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/diagnostics/scan_diagnostics_actor.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/audit/audit_config/audit_config.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_config/audit_config.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/show_create/show_create.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/show_create/show_create.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |84.4%| [EN] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |84.4%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |84.4%| [AR] {RESULT} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/push_filter_under_map.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/push_filter_under_map.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |84.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |84.5%| [AR] {RESULT} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |84.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |84.5%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/1cd54909ea94c2b06e13875a05_raw.auxcpp |84.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |84.5%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/1cd54909ea94c2b06e13875a05_raw.auxcpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/common/config.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/common/config.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |84.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/1cd54909ea94c2b06e13875a05_raw.auxcpp |84.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/1cd54909ea94c2b06e13875a05_raw.auxcpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |84.6%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |84.6%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |84.6%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/src/common.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/common.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/registration.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/registration.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_statistics.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_statistics.cpp |84.6%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |84.6%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/generic_manager.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/generic_manager.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |84.6%| [AR] {RESULT} 
$(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/sessions/sessions.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/sessions/sessions.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/push_filter_into_join.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/push_filter_into_join.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/inline_scalar_subplan.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/inline_scalar_subplan.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/prune_columns_stage.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/prune_columns_stage.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/program/resolver.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/program/resolver.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/program/libcore-tx-program.a |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/remove_extra_renames_stage.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/remove_extra_renames_stage.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |84.7%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |84.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |84.7%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/flat_executor_vacuum_logic.h_serialized.cpp |84.7%| [EN] {BAZEL_UPLOAD} 
$(B)/ydb/core/tablet_flat/flat_executor_vacuum_logic.h_serialized.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_query_builder.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_query_builder.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/backup/import/session.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/backup/import/session.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/initializer.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/initializer.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |84.7%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |84.7%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_inserted_portions.cpp |84.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_inserted_portions.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/flat_executor_vacuum_logic.h_serialized.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/flat_executor_vacuum_logic.h_serialized.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |84.7%| [AR] {RESULT} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |84.7%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |84.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |84.8%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |84.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/libcore-control-lib.a |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp 
|84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/common/ss_dialog.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/common/ss_dialog.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_join_builder.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_join_builder.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |84.9%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/object.cpp |84.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/metadata/manager/object.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |84.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |84.9%| [AR] {RESULT} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/apply_cbo.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/apply_cbo.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/usage/config.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/config.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/constant_folding_stage.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/correlated_filter_pullup.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/correlated_filter_pullup.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/constant_folding_stage.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/add_index.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/add_index.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |85.0%| [AR] {RESULT} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/build_initial_cbo_tree.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/build_initial_cbo_tree.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/abstract/initialization.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/initialization.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_sub_columns_portions.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_sub_columns_portions.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |85.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |85.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_source_builder.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_source_builder.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp 
|85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/common/timeout.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/common/timeout.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/protos/out/out.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/protos/out/out.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/out/libcore-protos-out.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |85.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |85.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/deleting.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/deleting.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |85.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |85.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |85.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/restore.cpp |85.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/restore.cpp |85.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/events.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/events.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |85.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |85.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |85.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/conveyor_composite/service/events.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/events.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/modification.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/modification.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/executor.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/executor.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/alter.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/alter.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/abstract/common.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/common.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/backup/async_jobs/import_downloader.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_filter_builder.cpp |85.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_filter_builder.cpp |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/backup/async_jobs/import_downloader.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |85.2%| [PY] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |85.2%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |85.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_stage_graph.cpp |85.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_stage_graph.cpp |85.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |85.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/98418d1c09486060ee803a5156_raw.auxcpp |85.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/98418d1c09486060ee803a5156_raw.auxcpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/manager.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/manager.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |85.2%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/abstract.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/abstract.cpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/98418d1c09486060ee803a5156_raw.auxcpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/98418d1c09486060ee803a5156_raw.auxcpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |85.2%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/compatibility/configs/dump/config-meta.json |85.2%| [UN] {RESULT} $(B)/ydb/tests/library/compatibility/configs/dump/config-meta.json |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |85.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |85.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/742f1d972307bb9d01a727deea_raw.auxcpp |85.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/742f1d972307bb9d01a727deea_raw.auxcpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |85.3%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |85.3%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/transfer_workload |85.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |85.3%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/transfer_workload |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |85.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |85.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/libydb-core-protos.a |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |85.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/protos/libydb-core-protos.a |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_thresholds.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_thresholds.cpp |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |85.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |85.3%| [LD] {RESULT} 
$(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/actors/http/ut/ydb-library-actors-http-ut |85.3%| [LD] {RESULT} $(B)/ydb/library/actors/http/ut/ydb-library-actors-http-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/actors/http/ut/ydb-library-actors-http-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.3%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.3%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.3%| [LD] {RESULT} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp |85.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |85.3%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 
'tsan.supp' is neither ET_REL nor LLVM bitcode |85.3%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |85.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.3%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_builder.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_builder.cpp |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |85.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |85.3%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |85.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |85.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |85.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |85.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |85.4%| [LD] {RESULT} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache ld.lld: warning: 
contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/common.cpp |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |85.4%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/common.cpp |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/restore.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/restore.cpp ------- [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |85.4%| [LD] {RESULT} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |85.4%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |85.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |85.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |85.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |85.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |85.4%| [LD] {RESULT} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |85.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |85.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |85.4%| [LD] {RESULT} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive 
member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/result_set_format/result_set_format ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/stress/result_set_format/result_set_format |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/result_set_format/result_set_format |85.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.4%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |85.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/workload/ydb_cli ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/workload/ydb_cli |85.5%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/workload/ydb_cli |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/workload/ydb_cli ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/workload/ydb_cli |85.5%| 
[BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/workload/ydb_cli |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |85.5%| [LD] {RESULT} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/workload/ydb_cli |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/workload/ydb_cli |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/stability/healthcheck/ydb_cli |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/stability/healthcheck/ydb_cli |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/workload/ydb_cli |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/workload/ydb_cli |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |85.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/ydb_cli |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ydb_cli ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/s3_backups ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/s3_backups |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/s3_backups |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload/ydb_cli |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/workload/ydb_cli |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |85.5%| [LD] {RESULT} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |85.5%| [LD] {RESULT} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp ------- [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv_volume/tests/ydb-tests-stress-kv_volume-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/tests/stress/kv_volume/tests/ydb-tests-stress-kv_volume-tests |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv_volume/tests/ydb-tests-stress-kv_volume-tests |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/value/ut/ydb-public-lib-value-ut |85.5%| [LD] {RESULT} $(B)/ydb/public/lib/value/ut/ydb-public-lib-value-ut |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/workload/ydb_cli ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/workload/ydb_cli |85.5%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |85.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |85.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |85.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/lib/value/ut/ydb-public-lib-value-ut |85.5%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/s3_backups_workload |85.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |85.5%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/s3_backups_workload |85.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/olap_workload ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/olap_workload |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/olap_workload ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/inline_simple_in_exists_subplan.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/inline_simple_in_exists_subplan.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut 
|85.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/ut/ydb-core-config-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/config/ut/ydb-core-config-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |85.6%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/workload/objcopy_ad01b515e6ee8206b6b9fafbc1.o |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |85.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |85.6%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/workload/objcopy_d6a1b41fc15f14862f8ec129ed.o |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/workload/libpy3stress-mixedpy-workload.global.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |85.6%| [LD] {RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |85.6%| [PY] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/stress/topic/workload/objcopy_456ef6a09159ceaa7dd7dffa59.o |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/workload/libpy3stress-topic_kafka-workload.global.a |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/attributes/ut/ydb-core-ymq-attributes-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/ymq/attributes/ut/ydb-core-ymq-attributes-ut |85.6%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/olap_workload |85.6%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/olap_workload |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |85.6%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/stability/healthcheck/objcopy_75ddb319f6ea5a5e05fb01b29c.o |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/workload/libpy3stress-topic-workload.global.a |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |85.6%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/workload/objcopy_3cad7ffe5c68e883c93b3c3b3a.o |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |85.6%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/workload/objcopy_33abb7f7b873fc17cb192a5592.o |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |85.6%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |85.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |85.6%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |85.6%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/workload/libpy3stress-kv-workload.global.a |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |85.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |85.7%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |85.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/stability/healthcheck/libpy3library-stability-healthcheck.global.a |85.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_49e50404ca7d8e93240b3617b2.o |85.7%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/workload_mixed ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |85.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |85.7%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/workload_mixed ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |85.7%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/workload/libpy3stress-log-workload.global.a |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |85.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/compressors/ut/ydb-library-yql-providers-s3-compressors-ut |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/library/yql/providers/s3/compressors/ut/ydb-library-yql-providers-s3-compressors-ut |85.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/compressors/ut/ydb-library-yql-providers-s3-compressors-ut |85.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |85.7%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |85.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |85.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/workload/objcopy_43f03d8ce0f7a500b6680c8688.o ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/workload_topic ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/workload_topic |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/workload_topic |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/workload_kv ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/workload_kv |85.7%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/workload_kv |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |85.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/tstool/tstool ld.lld: 
warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/tstool/tstool |85.7%| [LD] {RESULT} $(B)/ydb/tools/tstool/tstool |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/workload_log ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/workload_log |85.7%| [LD] {RESULT} $(B)/ydb/tests/stress/log/workload_log |85.7%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/topic_kafka_workload |85.7%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/topic_kafka_workload |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |85.7%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |85.7%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/mixedpy/workload/objcopy_ad01b515e6ee8206b6b9fafbc1.o |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |85.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |85.8%| [LD] {RESULT} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/kafka_streams_test ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |85.8%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/kafka_streams_test ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection |85.8%| [LD] {RESULT} 
$(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection |85.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |85.8%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/topic_workload |85.8%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/topic_workload |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |85.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |85.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/attributes/ut/ydb-core-ymq-attributes-ut |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [LD] {RESULT} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |85.8%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |85.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |85.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/topic_kafka/workload/objcopy_d6a1b41fc15f14862f8ec129ed.o |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |85.8%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |85.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |85.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/mixedpy/workload/libpy3stress-mixedpy-workload.global.a |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |85.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/topic/workload/objcopy_456ef6a09159ceaa7dd7dffa59.o |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/common/row_dispatcher_settings.cpp |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/topic_kafka/workload/libpy3stress-topic_kafka-workload.global.a |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/common/row_dispatcher_settings.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |85.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/library/stability/healthcheck/objcopy_75ddb319f6ea5a5e05fb01b29c.o |85.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |85.8%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |85.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/oltp_workload ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/topic/workload/libpy3stress-topic-workload.global.a |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |85.8%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |85.8%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/kafka_workload |85.8%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/kafka_workload |85.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/backup/iscan/iscan.cpp 
|85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_sort_builder.cpp |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/kv/workload/libpy3stress-kv-workload.global.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |85.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/backup/iscan/iscan.cpp |85.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/log/workload/objcopy_3cad7ffe5c68e883c93b3c3b3a.o |85.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/kv/workload/objcopy_33abb7f7b873fc17cb192a5592.o |85.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_sort_builder.cpp |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/library/stability/healthcheck/libpy3library-stability-healthcheck.global.a |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |85.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_49e50404ca7d8e93240b3617b2.o |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/log/workload/libpy3stress-log-workload.global.a |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |85.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/kafka/workload/objcopy_43f03d8ce0f7a500b6680c8688.o ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp |85.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/ut/ydb-core-config-ut |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc ------- [LD] {default-linux-x86_64, release, 
asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |85.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |85.9%| [LD] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |85.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |85.9%| [LD] {RESULT} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/ydb_serializable/replay/replay ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |85.9%| [LD] {RESULT} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics ------- [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/compatibility/result_set_format/ydb-tests-compatibility-result_set_format ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/result_set_format/ydb-tests-compatibility-result_set_format |85.9%| [LD] {RESULT} $(B)/ydb/tests/compatibility/result_set_format/ydb-tests-compatibility-result_set_format |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |85.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |85.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |85.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |86.0%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/oltp_workload |86.0%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/oltp_workload |86.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/peephole_predicate.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/peephole_predicate.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_compression/alter/ydb-tests-olap-column_compression-alter ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [LD] {RESULT} $(B)/ydb/tests/olap/column_compression/alter/ydb-tests-olap-column_compression-alter |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_compression/alter/ydb-tests-olap-column_compression-alter |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |86.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |86.0%| [LD] {RESULT} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |86.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a ------- [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/cms/ydb-tests-functional-cms ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/cfg/bin/ydb_configure ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/cfg/bin/ydb_configure |86.0%| [LD] {RESULT} $(B)/ydb/tools/cfg/bin/ydb_configure |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |86.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |86.0%| [LD] {RESULT} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |86.0%| [LD] {RESULT} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/usage/events.cpp |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/events.cpp |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/s3_bench/ydb_s3_bench |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/s3_bench/ydb_s3_bench |86.0%| [LD] {RESULT} $(B)/ydb/tests/tools/s3_bench/ydb_s3_bench |86.0%| [EN] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |86.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |86.0%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |86.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |86.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |86.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |86.0%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/local_cluster/local_cluster ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/local_cluster/local_cluster |86.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |86.0%| [LD] {RESULT} $(B)/ydb/tests/tools/local_cluster/local_cluster |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |86.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |86.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |86.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp |86.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive 
member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/lib/auth/auth_helpers.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/knn/ydb-tests-datashard-knn ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/knn/ydb-tests-datashard-knn |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/knn/ydb-tests-datashard-knn |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/lib/auth/auth_helpers.cpp |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |86.1%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/cfg |86.1%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/cfg |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |86.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |86.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tests/tpch/tpch |86.1%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |86.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |86.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |86.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |86.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |86.1%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |86.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |86.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/statistics_workload/statistics_workload ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/driver/nemesis ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.1%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/driver/nemesis |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/driver/nemesis ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/large/ydb-tests-sql-large ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |86.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |86.2%| [LD] {RESULT} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |86.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |86.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |86.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |86.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |86.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |86.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp |86.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |86.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |86.2%| [LD] {RESULT} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |86.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_compression/create/ydb-tests-olap-column_compression-create ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/olap/column_compression/create/ydb-tests-olap-column_compression-create |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_compression/create/ydb-tests-olap-column_compression-create |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/service_node/service_node |86.2%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/service_node/service_node |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/partcheck/partcheck |86.2%| [LD] {RESULT} $(B)/ydb/tools/partcheck/partcheck ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo 
------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/security/ydb-tests-functional-security ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |86.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/partcheck/partcheck |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/tools/decrypt/decrypt |86.2%| [LD] {RESULT} $(B)/ydb/core/backup/tools/decrypt/decrypt ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |86.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/backup/tools/decrypt/decrypt ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/executor.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/executor.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |86.2%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/statistics_workload |86.2%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/statistics_workload |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_plan_conversion_utils.cpp |86.2%| [BN] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/stability/tool/nemesis |86.2%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/nemesis |86.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_plan_conversion_utils.cpp |86.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |86.2%| [LD] {RESULT} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/tsserver/tsserver |86.2%| [LD] {RESULT} $(B)/ydb/tools/tsserver/tsserver ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/streaming/ydb-tests-compatibility-streaming ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.2%| [LD] {RESULT} $(B)/ydb/tests/compatibility/streaming/ydb-tests-compatibility-streaming |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/streaming/ydb-tests-compatibility-streaming ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/local_ydb/local_ydb ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/local_ydb/local_ydb |86.3%| [LD] {RESULT} $(B)/ydb/public/tools/local_ydb/local_ydb |86.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.3%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging ------- [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tools/ydbd_slice/bin/ydbd_slice ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.3%| [LD] {RESULT} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |86.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |86.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |86.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.3%| [LD] {RESULT} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |86.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |86.3%| [AR] {RESULT} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |86.3%| [LD] {RESULT} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |86.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |86.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/tsserver/tsserver |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |86.3%| [LD] {RESULT} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/ut/ydb-core-util-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/util/ut/ydb-core-util-ut |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/helpers/ut/ydb-core-kqp-compile_service-helpers-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/kqp/compile_service/helpers/ut/ydb-core-kqp-compile_service-helpers-ut |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp ------- [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/fq/plans/ydb-tests-fq-plans ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |86.4%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |86.4%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |86.4%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |86.4%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |86.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |86.4%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tests/ydb-tests-stability-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tests/ydb-tests-stability-tests |86.4%| [LD] {RESULT} $(B)/ydb/tests/stability/tests/ydb-tests-stability-tests ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |86.4%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |86.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |86.4%| [LD] {RESULT} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |86.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups ld.lld: warning: 
contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {RESULT} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |86.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |86.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |86.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/ut/ydb-core-util-ut |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |86.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |86.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |86.4%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/extract_join_expressions.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.4%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/extract_join_expressions.cpp |86.4%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload/tsserver |86.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/service.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/service.cpp |86.4%| [LD] {RESULT} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |86.4%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |86.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |86.4%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |86.4%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |86.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/meta/bin/mvp_meta |86.5%| [LD] {RESULT} $(B)/ydb/mvp/meta/bin/mvp_meta |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/meta/bin/mvp_meta ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_compression/fast/ydb-tests-olap-column_compression-fast ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [LD] {RESULT} $(B)/ydb/tests/olap/column_compression/fast/ydb-tests-olap-column_compression-fast |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_compression/fast/ydb-tests-olap-column_compression-fast |86.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/helpers/ut/ydb-core-kqp-compile_service-helpers-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [LD] {RESULT} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/add_data.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/federated_queries/ydb-tests-compatibility-federated_queries ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/add_data.cpp |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/federated_queries/ydb-tests-compatibility-federated_queries |86.5%| [LD] {RESULT} $(B)/ydb/tests/compatibility/federated_queries/ydb-tests-compatibility-federated_queries |86.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |86.5%| [LD] {RESULT} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |86.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |86.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ydb-tests-olap ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/tsserver |86.5%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |86.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/ydb-tests-compatibility ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [LD] {RESULT} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |86.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |86.5%| [LD] {RESULT} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |86.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |86.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |86.6%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_bc86b379db0e87d023f98a592e.o |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |86.6%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |86.6%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_bc86b379db0e87d023f98a592e.o |86.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |86.6%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |86.6%| [LD] {RESULT} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |86.6%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |86.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |86.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.6%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/large/ydb-tests-olap-large ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.6%| [LD] {RESULT} $(B)/ydb/tests/olap/large/ydb-tests-olap-large |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/large/ydb-tests-olap-large |86.6%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |86.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |86.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.6%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.6%| [LD] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload_testshard ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.6%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.7%| [LD] {RESULT} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.7%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |86.7%| [LD] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |86.7%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |86.7%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.7%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |86.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |86.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |86.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |86.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/dstool/ydb-dstool ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.7%| [LD] {RESULT} $(B)/ydb/apps/dstool/ydb-dstool |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/dstool/ydb-dstool |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/one_layer.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/one_layer.cpp |86.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |86.7%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |86.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |86.7%| [LD] {RESULT} 
$(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |86.7%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |86.7%| [AR] {RESULT} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |86.7%| [CC] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |86.8%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |86.8%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/assign_stages.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/assign_stages.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_map_builder.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_map_builder.cpp |86.8%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |86.8%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |86.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/expand_cbo_tree.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/expand_cbo_tree.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |86.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |86.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |86.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |86.8%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |86.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |86.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/push_olap_filter.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/push_olap_filter.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |86.9%| [AR] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |86.9%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_fulltext_index.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_fulltext_index.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |86.9%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |86.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |86.9%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |86.9%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |86.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |86.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/request/request_actor.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/request_actor.cpp |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |87.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/metadata/request/request_actor_cb.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |87.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |87.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/write.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp 
|87.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_aggregation_builder.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/physical_convertion/kqp_rbo_physical_aggregation_builder.cpp |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |87.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |87.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/rbo/physical_convertion/libopt-rbo-physical_convertion.a |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |87.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/rbo/physical_convertion/libopt-rbo-physical_convertion.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |87.0%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/backup/import/import_actor.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/backup/import/import_actor.cpp |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |87.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |87.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/data_events/shard_writer.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/shard_writer.cpp >> 
TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ddisk/ut/ydb-core-blobstorage-ddisk-ut |87.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ddisk/ut/ydb-core-blobstorage-ddisk-ut |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ddisk/ut/ydb-core-blobstorage-ddisk-ut |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a >> DiscoveryConverterTest::FullLegacyPath [GOOD] >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] >> TopicNameConverterForCPTest::BadLegacyTopics [GOOD] >> TopicNameConverterForCPTest::BadModernTopics [GOOD] |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |87.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |87.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |87.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] |87.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::BadModernTopics [GOOD] |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a >> TopicNameConverterTest::LegacyStyle [GOOD] >> TopicNameConverterTest::FirstClass [GOOD] |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |87.1%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |87.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::FirstClass [GOOD] |87.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp >> DiscoveryConverterTest::AccountDatabase [GOOD] >> DiscoveryConverterTest::CmWay [GOOD] |87.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |87.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |87.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |87.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::CmWay [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads >> TCircleBufTest::SimpleTest [GOOD] >> TCircleBufTest::PtrTest [GOOD] >> TLsnAllocTrackerTests::Test1 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> SysViewQueryHistory::AggrMerge [GOOD] >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMerge [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp >> TSegmentManagerTest::AdjacentRangesDoNotOverlap [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TSegmentManagerTest::AdjacentRangesDoNotOverlap [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TActorTest::TestDie [GOOD] >> TActorTest::TestFilteredGrab >> TActorTest::TestFilteredGrab [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads >> VDiskTest::HugeBlobAllocationRace >> VDiskCompactionTests::FreshCompactionKeepsDoNotKeepFlag |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestFilteredGrab [GOOD] >> VDiskCompactionTests::FreshCompactionKeepsDoNotKeepFlag [GOOD] >> TActorTest::TestWaitForFirstEvent |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> TActorTest::TestWaitForFirstEvent [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskCompactionTests::FreshCompactionKeepsDoNotKeepFlag [GOOD] |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitForFirstEvent [GOOD] Test command err: ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |87.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |87.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a >> VDiskTest::HugeBlobRecompaction |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> VDiskTest::HugeBlobRecompaction [GOOD] >> TSegmentManagerTest::OverlapAcrossSeveralRequestsTrimsSidesAndOutdatesMiddle [GOOD] >> TSegmentManagerTest::MiddleOverlapSplitsExistingRequest [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TActorTest::TestWaitFuture [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobRecompaction [GOOD] Test command err: Change MinHugeBlobSize# 102400 Change MinHugeBlobSize# 32513 >> VDiskTest::HugeBlobAllocationRace [GOOD] >> TSegmentManagerTest::PopUnknownRequestReturnsMaxSyncId [GOOD] >> TDDiskActorTest::SessionValidation [GOOD] >> TDDiskActorTest::WriteThenReadRoundtrip [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TSegmentManagerTest::OverlapAcrossSeveralRequestsTrimsSidesAndOutdatesMiddle [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TSegmentManagerTest::MiddleOverlapSplitsExistingRequest [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFuture [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobAllocationRace [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TSegmentManagerTest::PopUnknownRequestReturnsMaxSyncId [GOOD] >> TDDiskActorTest::ReadFromUnallocatedChunkReturnsZeroes >> TDDiskActorTest::ReadFromUnallocatedChunkReturnsZeroes [GOOD] >> TDDiskActorTest::PersistentBufferLifecycle [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TDDiskActorTest::WriteThenReadRoundtrip [GOOD] |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |87.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |87.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |87.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TDDiskActorTest::PersistentBufferLifecycle [GOOD] >> TSegmentManagerTest::PushPopSingleRequest [GOOD] |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp >> TSegmentManagerTest::CoverSeveralRequestsCollectsAllOutdated [GOOD] |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |87.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |87.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a >> TLsnMngrTests::AllocLsnForLocalUse10Threads [GOOD] >> TOutOfSpaceStateTests::TestLocal [GOOD] >> TOutOfSpaceStateTests::TestGlobal [GOOD] >> SysViewQueryHistory::AddDedupRandom [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TSegmentManagerTest::PushPopSingleRequest [GOOD] >> TDDiskActorTest::ConnectGenerationRules [GOOD] >> TDDiskActorTest::IncorrectRequestValidation [GOOD] >> SysViewQueryHistory::StableMerge [GOOD] >> SysViewQueryHistory::AggrMergeDedup [GOOD] |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TSegmentManagerTest::CoverSeveralRequestsCollectsAllOutdated [GOOD] >> SysViewQueryHistory::TopReadBytesAdd [GOOD] >> TActorTest::TestHandleEvent [GOOD] >> TActorTest::TestGetCtxTime [GOOD] >> SysViewQueryHistory::TopDurationAdd [GOOD] >> SysViewQueryHistory::AddDedup [GOOD] >> SysViewQueryHistory::AddDedup2 [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TOutOfSpaceStateTests::TestGlobal [GOOD] |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedupRandom [GOOD] |87.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TDDiskActorTest::IncorrectRequestValidation [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge [GOOD] |87.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMergeDedup [GOOD] >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopReadBytesAdd [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopDurationAdd [GOOD] >> SamplingControlTests::Simple [GOOD] 
|87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedup2 [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestGetCtxTime [GOOD] |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::Simple [GOOD] >> SamplingControlTests::EdgeCaseUpper [GOOD] >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp >> ThrottlerControlTests::Overflow_1 [GOOD] >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] >> ThrottlerControlTests::Overflow_2 [GOOD] |87.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |87.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a >> SamplingControlTests::EdgeCaseLower [GOOD] >> ThrottlerControlTests::LongIdle [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_1 [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseUpper [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_2 [GOOD] >> TActorTest::TestSendEvent [GOOD] >> TActorTest::TestSendAfterDelay >> TActorTest::TestCreateChildActor [GOOD] >> TActorTest::TestBlockEvents >> TActorTest::TestSendAfterDelay [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseLower [GOOD] >> TActorTest::TestBlockEvents [GOOD] |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::LongIdle [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.3%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o |87.3%| [PY] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendAfterDelay [GOOD] |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestBlockEvents [GOOD] Test command err: ... waiting for blocked 3 events ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... waiting for blocked 3 events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... waiting for blocked 1 more event (done) ... waiting for processed 2 more events ... waiting for processed 2 more events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for processed 3 more events ... 
waiting for processed 3 more events (done) |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> DSProxyStrategyTest::Restore_mirror3dc |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> TopicNameConverterTest::Paths >> TopicNameConverterTest::Paths [GOOD] >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> NameserviceConfigValidatorTests::TestEmptyConfig [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingId [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] >> TActorTest::TestWaitFor [GOOD] |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp >> DSProxyStrategyTest::Restore_block42 >> NameserviceConfigValidatorTests::TestRemoveTooMany [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> TBlobStorageHullDecimal::TestRoundToInt [GOOD] >> TBlobStorageHullDecimal::TestToUi64 [GOOD] |87.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] |87.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |87.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a >> TActorTest::TestScheduleEvent [GOOD] >> TActorTest::TestScheduleReaction |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/rules/inline_cbo_tree.cpp |87.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a >> TActorTest::TestScheduleReaction [GOOD] >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark 
[GOOD] >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/rules/inline_cbo_tree.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |87.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] |87.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestToUi64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFor [GOOD] Test command err: ... waiting for value = 42 ... waiting for value = 42 (done) |87.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a >> TBlobStorageHullDecimal::TestMkRatio [GOOD] >> TBlobStorageHullDecimal::TestMult [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestScheduleReaction [GOOD] |87.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/rbo/rules/libopt-rbo-rules.a >> TBlobStorageDiskBlob::CreateFromDistinctParts [GOOD] >> TBlobStorageDiskBlob::CreateIterate [GOOD] >> TBlobStorageDiskBlob::Merge [GOOD] >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] >> THullDsGenericNWayIt::ForwardIteration [GOOD] >> THullDsGenericNWayIt::BackwardIteration [GOOD] >> TBlobStorageHullStorageRatio::Test [GOOD] >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] |87.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/rbo/rules/libopt-rbo-rules.a |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMult [GOOD] |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |87.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |87.4%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a >> TCircleBufStringStreamTest::TestNotAligned [GOOD] >> TCircleBufStringStreamTest::TestOverflow [GOOD] >> TCircleBufTest::EmptyTest [GOOD] >> TCircleBufTest::OverflowTest [GOOD] >> TVDiskConfigTest::JustConfig |87.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageDiskBlob::CreateIterate [GOOD] |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp >> TResizableCircleBufTest::Test1 [GOOD] >> TResizableCircleBufTest::Test2 [GOOD] >> TTrackable::TBuffer [GOOD] >> 
TBlobStorageLinearTrackBar::TestLinearTrackBarDouble [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] >> TVDiskConfigTest::JustConfig [GOOD] >> TVDiskConfigTest::Basic [GOOD] >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] >> TVDiskConfigTest::RtmrProblem1 [GOOD] >> TVDiskConfigTest::RtmrProblem2 [GOOD] >> TVDiskConfigTest::ThreeLevels [GOOD] >> TBlobStorageSyncNeighborsTest::IterateOverAllDisks [GOOD] >> TBlobStorageSyncNeighborsTest::SerDes [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskIterators [GOOD] >> TCircleBufStringStreamTest::TestAligned [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] |87.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsGenericNWayIt::BackwardIteration [GOOD] |87.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] >> TActorTest::TestSendFromAnotherThread |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufTest::OverflowTest [GOOD] >> Scheme::TSerializedCellMatrix [GOOD] >> Scheme::OwnedCellVecFromSerialized [GOOD] >> Scheme::CellVecTryParse [GOOD] >> Scheme::CompareOrder [GOOD] >> Scheme::CompareUuidCells [GOOD] >> Scheme::TSerializedCellVec [GOOD] >> Scheme::UnsafeAppend [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyResolveHost [GOOD] >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] >> SchemeBorders::Full [GOOD] >> Scheme::YqlTypesMustBeDefined [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TBuffer [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufStringStreamTest::TestAligned [GOOD] >> BootstrapTabletsValidatorTests::TestNoNodeForTablet [GOOD] >> BootstrapTabletsValidatorTests::TestRequiredTablet [GOOD] >> BootstrapTabletsValidatorTests::TestImportantTablet [GOOD] >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] >> TActorTest::TestStateSwitch [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::ThreeLevels [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::OwnedCellVecFromSerialized [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::UnsafeAppend [GOOD] Test command err: Serialize: 0.001239s Cells constructor: 0.003394s Parse: 0.000231s Copy: 0.000178s Move: 0.000124s |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::CompareUuidCells [GOOD] |87.4%| 
[TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::YqlTypesMustBeDefined [GOOD] >> DiscoveryConverterTest::DiscoveryConverter [GOOD] >> DiscoveryConverterTest::EmptyModern [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestStateSwitch [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/abstract/fetcher.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/fetcher.cpp >> TypesProto::DecimalNoTypeInfo [GOOD] >> TypesProto::Decimal35 [GOOD] >> DiscoveryConverterTest::FullLegacyNames [GOOD] >> DiscoveryConverterTest::FirstClass [GOOD] >> DiscoveryConverterTest::MinimalName [GOOD] >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] >> Scheme::EmptyCell [GOOD] >> Scheme::CompareWithNullSemantics [GOOD] |87.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a >> Scheme::EmptyOwnedCellVec [GOOD] >> Scheme::NonEmptyOwnedCellVec [GOOD] |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::EmptyModern [GOOD] >> SchemeBorders::Partial [GOOD] >> SchemeRanges::CmpBorders [GOOD] >> TActorTest::TestSendFromAnotherThread [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedTaskName [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnknownQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnlimitedResource [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |87.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a >> Scheme::NullCell [GOOD] >> Scheme::NotEmptyCell [GOOD] >> SchemeRanges::RangesBorders [GOOD] >> TypesProto::Decimal22 [GOOD] >> ResourceBrokerConfigValidatorTests::TestMinConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoDefaultQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> TypesProto::Decimal35 [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FirstClass [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::CompareWithNullSemantics [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::NonEmptyOwnedCellVec [GOOD] >> BootstrapTabletsValidatorTests::TestUnknownNodeForTablet [GOOD] >> NameserviceConfigValidatorTests::TestAddNewNode [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingHostPort [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> SchemeRanges::CmpBorders [GOOD] |87.4%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendFromAnotherThread [GOOD] >> TRegistryTests::TestAddGet [GOOD] >> TRegistryTests::TestCheckConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroQueueWeight [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::NotEmptyCell [GOOD] |87.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> TypesProto::Decimal22 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp >> TRegistryTests::TestLock [GOOD] >> TRegistryTests::TestClasses [GOOD] >> TRegistryTests::TestDisableEnable [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullCompactDeferredQueueTest::Basic >> ReadBatcher::ReadBatcher >> NameserviceConfigValidatorTests::TestLongWalleDC [GOOD] >> NameserviceConfigValidatorTests::TestModifyClusterUUID [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForAddrPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> TRegistryTests::TestDisableEnable [GOOD] |87.4%| [TA] $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] >> ReadBatcher::Range >> TQueueBackpressureTest::IncorrectMessageId [GOOD] |87.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TA] $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp >> TBlobStorageQueueTest::TMessageLost [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp >> TQueueBackpressureTest::CreateDelete [GOOD] >> TQueueBackpressureTest::PerfTrivial |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::IncorrectMessageId [GOOD] >> TDelayedResponsesTests::Test [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageQueueTest::TMessageLost [GOOD] |87.4%| [TA] $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |87.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::CreateDelete [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TDelayedResponsesTests::Test [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> ReadBatcher::ReadBatcher [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |87.4%| [TA] {RESULT} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.4%| [TA] {RESULT} $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |87.4%| [TA] {RESULT} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.4%| [LD] {RESULT} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |87.4%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |87.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.4%| [LD] {RESULT} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |87.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |87.5%| [LD] {RESULT} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |87.5%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |87.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut >> KqpCompileServiceHelpers::CheckInvalidator [GOOD] |87.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |87.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |87.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |87.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> 
TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::ReadBatcher [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorForward [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] >> TopicNameConverterTest::LegacyStyleDoubleName [GOOD] >> TopicNameConverterTest::NoTopicName [GOOD] |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |87.5%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |87.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a >> TopicNameConverterForCPTest::CorrectLegacyTopics [GOOD] >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] |87.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a >> TQueueBackpressureTest::PerfTrivial [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> KqpCompileServiceHelpers::CheckInvalidator [GOOD] Test command err: empty ResourceManager { MkqlHeavyProgramMemoryLimit: 31457280 } empty ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } modified: MkqlHeavyProgramMemoryLimit: 31457281 -> 31457280 ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |87.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::NoTopicName [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfTrivial [GOOD] 
|87.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] >> TBlobStorageGroupInfoIterTest::Domains [GOOD] >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut >> KqpCompileServiceHelpers::OnlyRmAndTopLevelOptionsAreSupportedToInvalidate |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TA] $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] |87.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TBlobStorageGroupInfoIterTest::IteratorForwardAndBackward [GOOD] >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |87.6%| [TA] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.6%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> KqpCompileServiceHelpers::OnlyRmAndTopLevelOptionsAreSupportedToInvalidate [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/tool ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |87.6%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |87.6%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] |87.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a >> TQueueBackpressureTest::PerfInFlight |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |87.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> KqpCompileServiceHelpers::OnlyRmAndTopLevelOptionsAreSupportedToInvalidate [GOOD] |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |87.6%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |87.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |87.6%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |87.6%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::CheckCorrectBehaviourWithHashOverlow [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.6%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TCircularOperationQueueTest::ShouldStartInflight1 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight3 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue2 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight100 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight10 [GOOD] >> TCircularOperationQueueTest::ShouldStartEmpty [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight2 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue1 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue3 |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TCircularOperationQueueTest::ShouldStartInflightEnqueue3 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue10 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue100 [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenNothingStarted [GOOD] >> TCircularOperationQueueTest::UseMinOperationRepeatDelayWhenTimeout [GOOD] >> TCircularOperationQueueTest::ShouldTryToStartAnotherOneWhenStartFails [GOOD] >> TCircularOperationQueueTest::ShouldShuffle [FAIL] >> TCircularOperationQueueTest::ShouldTolerateInaccurateTimer [GOOD] >> TCircularQueueTest::Empty [GOOD] >> TCircularQueueTest::ShouldPush [GOOD] >> TCircularQueueTest::ShouldNotPushTwice [GOOD] >> TCircularQueueTest::ShouldNextSingleItem [GOOD] >> TCircularQueueTest::ShouldNextMulti [GOOD] >> TCircularQueueTest::ShouldRemove [GOOD] >> TCircularQueueTest::ShouldNotRemoveMissing [GOOD] >> TCircularQueueTest::ShouldRemoveCurrent [GOOD] >> TCircularQueueTest::ShouldRemoveCurrentLast [GOOD] >> TCircularQueueTest::ShouldGetQueue [GOOD] >> TPriorityOperationQueueTest::ShouldStartEmpty [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityReadyQueue [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityWaitingQueue [GOOD] >> TPriorityOperationQueueTest::UpdateNonExistingShouldReturnFalse [GOOD] >> TPriorityQueueTest::TestOrder [GOOD] >> 
TQueueInplaceTests::TestSimpleInplace [GOOD] >> TQueueInplaceTests::DestroyInDestructor [GOOD] >> TQueueInplaceTests::EmplacePopDefault [GOOD] >> TQueueInplaceTests::PopTooManyTimes [GOOD] >> TQueueInplaceTests::MoveConstructor [GOOD] >> TQueueInplaceTests::MoveAssignment [GOOD] >> TSimpleCacheTest::TestSimpleCache [GOOD] >> TSimpleCacheTest::TestNotSoSimpleCache [GOOD] >> TStrongTypeTest::DefaultConstructorDeleted [GOOD] >> TStrongTypeTest::DefaultConstructorValue [GOOD] >> TTokenBucketTest::Unlimited [GOOD] >> TTokenBucketTest::Limited [GOOD] >> TTokenBucketTest::DelayCalculation [GOOD] >> TULID::ParseAndFormat [GOOD] >> TULID::HeadByteOrder [GOOD] >> TULID::TailByteOrder [GOOD] >> TULID::EveryBitOrder [GOOD] >> TULID::Generate [GOOD] >> TWildcardTest::TestWildcard [GOOD] >> TWildcardTest::TestWildcards [GOOD] |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |87.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |87.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [LD] {RESULT} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.7%| [LD] {BAZEL_UPLOAD} 
$(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TWildcardTest::TestWildcards [GOOD] >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecUnion >> TConcurrentRWHashTest::TEmptyGetTest [GOOD] >> TConcurrentRWHashTest::TInsertTest [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTest [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTestFunc [GOOD] >> TConcurrentRWHashTest::TRemoveTest [GOOD] >> TConcurrentRWHashTest::TEraseTest [GOOD] >> TCowBTreeTest::Empty [GOOD] >> TCowBTreeTest::Basics [GOOD] >> TCowBTreeTest::ClearAndReuse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCircularQueueTest::ShouldGetQueue [GOOD] Test command err: assertion failed at ydb/core/util/operation_queue_ut.cpp:722, virtual void NKikimr::NOperationQueue::NTestSuiteTCircularOperationQueueTest::TTestCaseShouldShuffle::Execute_(NUnitTest::TTestContext &): (starter.StartHistory != TVector({1, 2, 3, 4, 5})) TBackTrace::Capture()+28 (0x152189C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x15FA58C) NKikimr::NOperationQueue::NTestSuiteTCircularOperationQueueTest::TTestCaseShouldShuffle::Execute_(NUnitTest::TTestContext&)+8573 (0x106DABD) std::__y1::__function::__func, void ()>::operator()()+280 (0x10B9898) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x162981A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1601628) NKikimr::NOperationQueue::NTestSuiteTCircularOperationQueueTest::TCurrentTest::Execute()+1357 (0x10B8BAD) NUnitTest::TTestFactory::Execute()+2176 (0x1602DE0) NUnitTest::RunMain(int, char**)+5805 (0x162388D) ??+0 (0x7F9553CEED90) __libc_start_main+128 (0x7F9553CEEE40) _start+41 (0xADE029) >> TCowBTreeTest::ClearAndReuse [GOOD] >> TCowBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekForwardPermutationsThreadSafe >> THyperLogCounterTest::TestIncrement |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |87.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk >> TCowBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::RandomInsertInplace >> THyperLogCounterTest::TestIncrement [GOOD] >> THyperLogCounterTest::TestHybridIncrement [GOOD] >> THyperLogCounterTest::TestHybridAdd [GOOD] >> TIntervalSetTest::IntervalMapTestEmpty [GOOD] >> TIntervalSetTest::IntervalMapTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAdd >> TIntervalSetTest::IntervalVecUnion [GOOD] >> TIntervalSetTest::IntervalVecUnionInplace >> TIntervalSetTest::IntervalSetTestEmpty [GOOD] >> TIntervalSetTest::IntervalSetTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAdd >> TIntervalSetTest::IntervalVecTestEmpty [GOOD] >> TIntervalSetTest::IntervalVecTestSpecificAdd [GOOD] >> TSegmentManagerTest::FullCoverProducesOutdatedAndEmptyPop [GOOD] >> 
TIntervalSetTest::IntervalVecTestAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAdd >> SysViewQueryHistory::StableMerge2 [GOOD] |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp >> TIntervalSetTest::IntervalMapTestAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference |87.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |87.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TIntervalSetTest::IntervalSetTestAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference >> TIntervalSetTest::IntervalVecUnionInplace [GOOD] >> TIntervalSetTest::IntervalVecUnionInplaceSelf [GOOD] >> TIntrusiveFixedHashSetTest::TestEmptyFind [GOOD] >> TIntrusiveFixedHashSetTest::TestPushFindClear [GOOD] >> TIntrusiveHeapTest::TestEmpty [GOOD] >> TIntrusiveHeapTest::TestAddRemove [GOOD] >> TIntrusiveHeapTest::TestUpdateNoChange [GOOD] >> TIntrusiveHeapTest::TestUpdateIncrease [GOOD] >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> TIntrusiveStackTest::TestEmptyPop [GOOD] >> TIntrusiveStackTest::TestPushPop [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty >> TCacheTest::TestSizeBasedOverflowCallback [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenHasWaitingAndStart [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight2 [GOOD] >> TCacheTest::TestUnboundedMapCache [GOOD] >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight1 [GOOD] >> TCircularOperationQueueTest::RemoveExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted2 [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotRunning [GOOD] >> TCacheTest::TestUpdateItemSize [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotExisting [GOOD] >> TCacheTest::Test2QCache [GOOD] >> TCacheTest::TestLruCache [GOOD] >> TCircularOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TCircularOperationQueueTest::ShouldReturnExecTime [GOOD] >> TCacheTest::EnsureNoLeakAfterUnboundedCacheOnMapDtor [GOOD] >> TCircularOperationQueueTest::CheckTimeout [GOOD] >> TCircularOperationQueueTest::RemoveNonExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::CheckOnDoneNotExisting [GOOD] >> TCircularOperationQueueTest::CheckWakeupAfterStop [GOOD] >> TCircularOperationQueueTest::BasicRPSCheckWithRound [GOOD] >> TCircularOperationQueueTest::CheckRemoveRunning [GOOD] >> TCircularOperationQueueTest::CheckRemoveWaiting [GOOD] >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved 
[GOOD] >> TCircularOperationQueueTest::BasicRPSCheck [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestAddAgainstReference >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestAddAgainstReference >> TIntervalSetTest::IntervalVecTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestAddAgainstReference |87.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> TIntervalSetTest::IntervalSetTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge2 [GOOD] >> AddressClassifierTest::TestAddressExtraction [GOOD] >> AddressClassifierTest::TestAddressParsing [GOOD] >> AddressClassifierTest::TestClassfierWithAllIpTypes [GOOD] >> AddressClassifierTest::TestLabeledClassifier [GOOD] >> AddressClassifierTest::TestLabeledClassifierFromNetData [GOOD] >> TBitsTest::TestNaiveClz [GOOD] >> TBTreeTest::Basics [GOOD] >> TBTreeTest::ClearAndReuse >> VDiskTest::HugeBlobWrite >> TIntervalSetTest::IntervalMapTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference >> TBTreeTest::ClearAndReuse [GOOD] >> TBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TBTreeTest::SeekForwardPermutationsThreadSafe >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetUnion >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapUnion >> TBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TBTreeTest::RandomInsertInplace |87.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ddisk/ut/unittest >> TSegmentManagerTest::FullCoverProducesOutdatedAndEmptyPop [GOOD] |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/blobsan/blobsan |87.8%| [LD] {RESULT} $(B)/ydb/tools/blobsan/blobsan >> TBlobStorageHullFresh::SimpleBackwardEnd [GOOD] >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] >> TBlobStorageHullFresh::SolomonStandCrash [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::BasicRPSCheck [GOOD] >> TIntervalSetTest::IntervalSetUnion [GOOD] >> TIntervalSetTest::IntervalSetUnionInplace |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp >> TSTreeTest::Basic [GOOD] >> TSVecTest::Basic [GOOD] >> TBlobStorageHullFreshSegment::PerfAppendix |87.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |87.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |87.8%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp >> ThrottlerControlTests::Simple [GOOD] >> TIntervalSetTest::IntervalMapUnion [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut >> TBlobStorageHullHugeHeap::RecoveryMode [GOOD] |87.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] >> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD] >> TFreshAppendixTest::IterateForwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] |87.8%| [TA] $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] |87.8%| [TA] {RESULT} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |87.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TA] $(B)/ydb/core/blobstorage/ddisk/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TIntervalSetTest::IntervalSetUnionInplace [GOOD] >> TIntervalSetTest::IntervalSetUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecIntersection |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |87.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |87.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |87.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ddisk/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] |87.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ddisk/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Simple [GOOD] |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut >> TIntervalSetTest::IntervalVecIntersection [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace |87.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TSVecTest::Basic [GOOD] >> TBlobStorageHullHugeChain::HeapAllocSmall [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] >> TFreshAppendixTest::IterateBackwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecDifference |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] >> TQueueBackpressureTest::PerfInFlight [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] >> TChainLayoutBuilder::TestBucketsV2 [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalMapIntersection >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] >> TIntervalSetTest::IntervalVecDifference [GOOD] >> TIntervalSetTest::IntervalVecDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |87.8%| [TA] $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TA] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] |87.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> THugeHeapCtxTests::Basic [GOOD] >> TopTest::Test1 [GOOD] |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] >> TChainLayoutBuilder::TestProdConf [GOOD] >> TChainLayoutBuilder::TestMilestoneId [GOOD] |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestBucketsV2 [GOOD] >> TBlobStorageHullHugeChain::AllocFreeAllocTest [GOOD] >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfInFlight [GOOD] |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut >> TIntervalSetTest::IntervalMapIntersection [GOOD] >> TIntervalSetTest::IntervalSetIntersection |87.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/blobsan/blobsan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] Test command err: testing erasure none main# 0 main# 1 Checked 2 cases, took 6 us testing erasure block-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 1242035 us ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] Test command err: [0:1:0:3:1]# 173 184 157 167 152 185 195 192 144 [0:1:1:1:1]# 189 195 192 171 157 161 167 155 196 [0:1:3:3:1]# 184 157 182 152 185 157 192 144 189 [0:1:3:4:0]# 148 154 155 158 194 160 156 163 140 [0:1:2:3:2]# 152 177 174 176 154 146 161 170 168 [0:1:1:2:1]# 157 167 152 189 195 192 171 157 161 [0:1:1:0:2]# 158 150 131 167 177 161 177 174 173 [0:1:3:0:1]# 161 155 171 196 154 167 184 157 182 [0:1:0:3:2]# 174 173 152 146 184 176 168 157 161 [0:1:2:2:0]# 163 140 161 148 162 159 168 178 190 [0:1:0:2:0]# 161 156 163 159 196 148 190 162 168 [0:1:3:2:1]# 152 185 157 192 144 189 161 155 171 [0:1:2:3:1]# 157 182 173 185 157 167 144 189 195 [0:1:3:1:2]# 157 161 170 131 190 158 161 178 167 [0:1:2:0:1]# 155 171 157 154 167 155 157 182 173 
[0:1:3:0:2]# 131 190 158 161 178 167 173 152 177 [0:1:2:0:2]# 190 158 150 178 167 177 152 177 174 [0:1:2:4:1]# 154 167 155 157 182 173 185 157 167 [0:1:2:1:2]# 161 170 168 190 158 150 178 167 177 [0:1:2:4:2]# 178 167 177 152 177 174 176 154 146 [0:1:0:2:1]# 167 152 185 195 192 144 157 161 155 [0:1:0:0:0]# 190 162 168 174 148 154 177 158 194 [0:1:3:2:0]# 156 163 140 196 148 162 162 168 178 [0:1:1:0:1]# 171 157 161 167 155 196 182 173 184 [0:1:0:2:2]# 146 184 176 168 157 161 150 131 190 [0:1:1:0:0]# 178 190 162 155 174 148 160 177 158 [0:1:2:3:0]# 194 160 177 163 140 161 148 162 159 [0:1:2:4:0]# 154 155 174 194 160 177 163 140 161 [0:1:1:3:2]# 177 174 173 154 146 184 170 168 157 [0:1:2:1:1]# 144 189 195 155 171 157 154 167 155 [0:1:1:1:0]# 162 159 196 178 190 162 155 174 148 [0:1:1:3:1]# 182 173 184 157 167 152 189 195 192 [0:1:3:4:1]# 196 154 167 184 157 182 152 185 157 [0:1:1:4:2]# 167 177 161 177 174 173 154 146 184 [0:1:0:1:0]# 159 196 148 190 162 168 174 148 154 [0:1:3:4:2]# 161 178 167 173 152 177 184 176 154 [0:1:0:0:1]# 157 161 155 155 196 154 173 184 157 [0:1:1:4:0]# 155 174 148 160 177 158 140 161 156 [0:1:2:1:0]# 148 162 159 168 178 190 154 155 174 [0:1:2:0:0]# 168 178 190 154 155 174 194 160 177 [0:1:3:3:2]# 173 152 177 184 176 154 157 161 170 [0:1:0:4:0]# 174 148 154 177 158 194 161 156 163 [0:1:1:2:0]# 140 161 156 162 159 196 178 190 162 [0:1:0:1:1]# 195 192 144 157 161 155 155 196 154 [0:1:3:0:0]# 162 168 178 148 154 155 158 194 160 [0:1:3:1:1]# 192 144 189 161 155 171 196 154 167 [0:1:0:4:1]# 155 196 154 173 184 157 167 152 185 [0:1:2:2:1]# 185 157 167 144 189 195 155 171 157 [0:1:3:1:0]# 196 148 162 162 168 178 148 154 155 [0:1:2:2:2]# 176 154 146 161 170 168 190 158 150 [0:1:0:3:0]# 177 158 194 161 156 163 159 196 148 [0:1:3:3:0]# 158 194 160 156 163 140 196 148 162 [0:1:0:1:2]# 168 157 161 150 131 190 177 161 178 [0:1:3:2:2]# 184 176 154 157 161 170 131 190 158 [0:1:1:3:0]# 160 177 158 140 161 156 162 159 196 [0:1:1:2:2]# 154 146 184 170 168 157 158 150 131 [0:1:1:4:1]# 167 155 196 182 173 184 157 167 152 [0:1:1:1:2]# 170 168 157 158 150 131 167 177 161 [0:1:0:0:2]# 150 131 190 177 161 178 174 173 152 [0:1:0:4:2]# 177 161 178 174 173 152 146 184 176 mean# 166.6666667 dev# 15.11254078 |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] |87.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestMilestoneId [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test1 [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |87.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> TCowBTreeTest::RandomInsertInplace [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe >> TIntervalSetTest::IntervalSetIntersection [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |87.9%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |87.9%| [LD] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |87.9%| [TA] $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageReplRecoveryMachine::BasicFunctionality >> HullReplWriteSst::Basic |87.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |87.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> SemiSortedDeltaAndVarLengthCodec::Random32 |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageSyncLogDsk::AddByOne [GOOD] >> TBlobStorageSyncLogDsk::AddFive [GOOD] >> TBlobStorageSyncLogDsk::ComplicatedSerializeWithOverlapping [GOOD] >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] >> VarLengthIntCodec::BasicTest64 [GOOD] >> VarLengthIntCodec::Random32 >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction >> FormatTimes::ParseDuration [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random64 >> TBlobStorageSyncLogDsk::SeveralChunks [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_OnePageIndexed [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_SeveralPagesIndexed [GOOD] >> TBlobStorageSyncLogDsk::TrimLog [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention >> VarLengthIntCodec::Random32 [GOOD] >> VarLengthIntCodec::Random64 >> StatsFormat::FullStat [GOOD] |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/tools/memory_tests/bin/memory_tests |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/tools/memory_tests/bin/memory_tests |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/tools/memory_tests/bin/memory_tests >> TIntervalSetTest::IntervalMapIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace >> VarLengthIntCodec::Random64 [GOOD] >> TBTreeTest::RandomInsertInplace [GOOD] >> TBTreeTest::RandomInsertThreadSafe >> RunLengthCodec::Random32 >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random64 [GOOD] >> SemiSortedDeltaCodec::BasicTest32 [GOOD] >> SemiSortedDeltaCodec::BasicTest64 [GOOD] >> RunLengthCodec::Random32 [GOOD] >> RunLengthCodec::Random64 >> RunLengthCodec::Random64 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalMapDifference >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] >> Config::IncludeScope [GOOD] >> FormatTimes::DurationMs [GOOD] >> StatsFormat::AggregateStat [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |87.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::TrimLog [GOOD] |87.9%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::Random64 [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::FullStat [GOOD] >> FormatTimes::DurationUs [GOOD] >> ReadBatcher::Range [GOOD] |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |87.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp >> CodecsTest::Basic [GOOD] >> CodecsTest::NaturalNumbersAndZero [GOOD] >> CodecsTest::LargeAndRepeated [GOOD] >> NaiveFragmentWriterTest::Basic [GOOD] >> Config::ExcludeScope |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::BasicTest64 [GOOD] |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp >> NaiveFragmentWriterTest::Long >> TIncrHugeBasicTest::Recovery [GOOD] >> Config::ExcludeScope [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::IncludeScope [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationUs [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationMs [GOOD] >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] >> TIntervalSetTest::IntervalMapDifference [GOOD] >> TIntervalSetTest::IntervalSetDifference |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] >> TIncrHugeBasicTest::Defrag >> NaiveFragmentWriterTest::Long [GOOD] >> ReorderCodecTest::Basic [GOOD] >> RunLengthCodec::BasicTest32 [GOOD] >> RunLengthCodec::BasicTest64 [GOOD] >> TIncrHugeBlobIdDict::Basic [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> NaiveFragmentWriterTest::Basic [GOOD] |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |87.9%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |87.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] |87.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Recovery [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/blobstorage/incrhuge/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf >> TIntervalSetTest::IntervalSetDifference [GOOD] >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetDifferenceInplaceSelf [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::Range [GOOD] >> TBlobStorageSyncLogMem::FilledIn1PutAfterSnapshot |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBlobIdDict::Basic [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> RunLengthCodec::BasicTest64 [GOOD] >> TBlobStorageSyncLogMem::FilledIn1PutAfterSnapshot [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsPerf >> TBlobStorageSyncLogKeeper::CutLog_EntryPointNewFormat [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLog [GOOD] >> TBlobStorageSyncLogMem::FilledIn1 [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh |88.0%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh >> TBsLocalRecovery::StartStopNotEmptyDB >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh >> TBsLocalRecovery::MultiPutWriteRestartRead |88.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBsVDiskGC::TGCManyVPutsDelTabletTest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBsVDiskExtreme::SimpleGetFromEmptyDB >> TBsOther1::PoisonPill |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] >> THugeMigration::ExtendMap_HugeBlobs >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh >> TBTreeTest::RandomInsertThreadSafe [GOOD] >> TBTreeTest::DuplicateKeysInplace >> TBlobStorageSyncLogData::SerializeParseEmpty1_Proto [GOOD] >> TBlobStorageSyncLogData::SerializeParseEmpty2_Proto [GOOD] >> SemiSortedDeltaCodec::Random32 >> TBsVDiskGC::GCPutKeepIntoEmptyDB >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh >> SemiSortedDeltaCodec::Random32 [GOOD] >> SemiSortedDeltaCodec::Random64 >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh >> TBsVDiskExtremeHuge::Simple3Put3GetFresh |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalSetDifferenceInplaceSelf [GOOD] >> SemiSortedDeltaCodec::Random64 [GOOD] >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction >> TBsDbStat::ChaoticParallelWrite_DbStat |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBTreeTest::DuplicateKeysInplace [GOOD] >> TBTreeTest::DuplicateKeysThreadSafe >> TBsVDiskRepl1::ReplProxyKeepBits >> TBsVDiskOutOfSpace::WriteUntilOrangeZone [GOOD] >> TBsVDiskOutOfSpace::WriteUntilYellowZone >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh >> TopTest::Test2 [GOOD] >> TBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TBTreeTest::ShouldCallDtorsInplace [GOOD] >> TBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TBTreeTest::Concurrent >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::Random64 [GOOD] >> TBsVDiskGC::TGCManyVPutsDelTabletTest [GOOD] >> TBsVDiskManyPutGet::ManyPutGet >> TCowBTreeTest::RandomInsertThreadSafe [GOOD] >> TBsVDiskExtreme::SimpleGetFromEmptyDB [GOOD] >> TBsVDiskExtreme::Simple3Put3GetFresh >> TCowBTreeTest::MultipleSnapshots >> TBlobStorageHullHugeHeap::BorderValues [GOOD] >> TBlobStorageHullHugeHeap::LockChunks [GOOD] |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |88.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a >> TBsVDiskDefrag::DefragEmptyDB >> TBsVDiskGC::GCPutKeepIntoEmptyDB [GOOD] >> TBsVDiskGC::GCPutBarrierVDisk0NoSync >> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeStandard [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] |88.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |88.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp >> TBTreeTest::Concurrent [GOOD] >> TBTreeTest::IteratorDestructor [GOOD] >> TCacheTest::EnsureNoLeakAfterLruCacheDtor [GOOD] >> TCacheTest::EnsureNoLeakAfterQ2CacheDtor [GOOD] >> CompressionTest::lz4_generator_basic [GOOD] >> CompressionTest::lz4_generator_deflates [GOOD] >> FastLookupUniqueList::Stress >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test2 [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction >> TBsVDiskExtremeHuge::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::LockChunks [GOOD] >> TBsVDiskRepl1::ReplProxyKeepBits [GOOD] >> TBsVDiskRepl1::ReadOnly >> TBlobStorageHullFreshSegment::PerfAppendix [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBlobStorageHullFreshSegment::PerfSkipList >> TBlobStorageBlocksCacheTest::MultipleTables [GOOD] >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD] >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] >> TBsVDiskExtreme::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtreme::Simple3Put3GetCompaction >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::MultipleTables [GOOD] >> TBsVDiskDefrag::DefragEmptyDB [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh >> TBsLocalRecovery::MultiPutWriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge |88.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh >> TBsVDiskGC::GCPutBarrierVDisk0NoSync [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh >> PgTest::DumpIntCells |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction >> PgTest::DumpIntCells [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] >> TBsVDiskRepl1::ReadOnly [GOOD] >> TBsVDiskRepl3::AnubisTest [GOOD] >> TBsVDiskRepl3::SyncLogTest >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh |88.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] Test command err: STEP 1 STEP 2 StringToId# 63 numItems# 110271 >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction >> TBsVDiskExtreme::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] |88.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageBlocksCacheTest::Repeat [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction |88.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> TBsLocalRecovery::MultiPutWriteRestartReadHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpIntCells [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::Repeat [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly >> TBlobStorageSyncLogMem::ManyLogoBlobsPerf [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsBuildSwapSnapshot [GOOD] >> VarLengthIntCodec::BasicTest32 [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> THugeMigration::ExtendMap_HugeBlobs [GOOD] >> THugeMigration::ExtendMap_SmallBlobsBecameHuge >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] >> THullDsHeapItTest::HeapForwardIteratorAllEntities [GOOD] >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |88.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> FastLookupUniqueList::Stress [GOOD] >> StLog::Basic [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |88.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh |88.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> TPDiskErrorStateTests::Basic [GOOD] >> TPDiskErrorStateTests::Basic2 [GOOD] >> TPDiskErrorStateTests::BasicErrorReason [GOOD] >> TTrackable::TVector [GOOD] >> TTrackable::TList [GOOD] >> TTrackable::TString [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskGC::GCPutBarrierSync >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::BasicTest32 [GOOD] >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest >> TBsVDiskRepl3::SyncLogTest [GOOD] |88.1%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark |88.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBlobStorageSyncNeighborsTest::CheckRevLookup [GOOD] >> TBlobStorageSyncNeighborsTest::CheckIsMyDomain [GOOD] >> TBlobStorageSyncNeighborsTest::CheckFailDomainsIterators [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TPDiskErrorStateTests::BasicErrorReason [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TString [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] |88.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl3::SyncLogTest [GOOD] Test command err: RUN TEST SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> StLog::Basic [GOOD] Test command err: Producer 0 worked for 0.07286666138 seconds Producer 1 worked for 0.09963403122 seconds Consumer 0 worked for 0.2890551042 seconds Consumer 1 worked for 0.2833136496 seconds Consumer 2 worked for 
0.2748243318 seconds Consumer 3 worked for 0.321462832 seconds |88.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |88.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TFlatDatabasePgTest::BasicTypes |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> TFlatDatabasePgTest::BasicTypes [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TCowBTreeTest::MultipleSnapshots [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] |88.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> TFlatDatabasePgTest::BasicTypes [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] |88.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |88.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a >> TBsVDiskGC::GCPutBarrierSync [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBlobStorageHullSstIt::TestSeekToFirst [GOOD] >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] |88.1%| [TA] $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp >> TBlobStorageHullWriteSst::BlockOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] |88.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] |88.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a >> TBlobStorageHullSstIt::TestSstIndexSeekAndIterate [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] >> TBlobStorageHullSstIt::TestSeekToLast [GOOD] >> TBlobStorageHullSstIt::TestSstIndexSaveLoad [GOOD] >> TBlobStorageBarriersTreeTest::Tree [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndex [GOOD] |88.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskGC::GCPutBarrierSync [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] |88.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst >> THugeMigration::ExtendMap_SmallBlobsBecameHuge [GOOD] >> THugeMigration::RollbackMap_HugeBlobs |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [TA] $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToLast [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] >> TBlobStorageIngressMatrix::VectorTest [GOOD] >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] >> TBlobStorageIngressMatrix::VectorTestMinus [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndNext [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] |88.1%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] >> TBlobStorageIngressMatrix::VectorTestEmpty [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks [GOOD] |88.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSstIndexSaveLoad [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::Tree [GOOD] >> TLogPriorityMuteTests::UnmuteTest [GOOD] >> TMaxTracker::basic_test [GOOD] >> TMaxTracker::two_threads_collect_update_ops_per_sec |88.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |88.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TBsVDiskManyPutGet::ManyPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiSinglePutGet |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] >> TBlobStorageIngressMatrix::MatrixTest >> TBlobStorageIngress::IngressPrintDistribution [GOOD] |88.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageIngressMatrix::MatrixTest [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] |88.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |88.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |88.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |88.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::MatrixTest [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore1 [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndNoToken [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageHullSstIt::TestSeekBefore [GOOD] >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] >> TCowBTreeTest::SnapshotRollback >> TCowBTreeTest::SnapshotCascade [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseAnd [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement1 [GOOD] >> TBlobStorageIngressMatrix::ShiftedBitVecBase [GOOD] >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator1 [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> AuthTokenAllowed::PassOnListMatchGroupSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSidAndGroups [GOOD] >> TBlobStorageIngress::Ingress [GOOD] >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumBasic4Plus2_8_1 [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSid [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSidWithGroup [GOOD] >> TBlobStorageHullFresh::SimpleBackWardEnd2Times [GOOD] >> TBlobStorageHullFresh::Perf |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitsBefore1 [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement1 [GOOD] >> TFreshAppendixTest::IterateForwardIncluding [GOOD] >> TFreshAppendixTest::IterateForwardExcluding [GOOD] >> AuthTokenAllowed::FailOnListAndEmptyToken [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSidWithGroup [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSid [GOOD] >> TBlobStorageHullFresh::AppendixPerf |88.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> AuthTokenAllowed::FailOnListMatchGroupSid [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::BarrierIngressQuorumBasic4Plus2_8_1 [GOOD] |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndEmptyToken [GOOD] |88.2%| [TA] $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnListMatchUserSidWithGroup [GOOD] |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |88.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSidAndGroups |88.2%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::PassOnOwnerMatchUserSid [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateForwardExcluding [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::FailOnListAndNoToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndNoToken [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndEmptyToken [GOOD] >> TBlobStorageIngress::IngressLocalParts [GOOD] >> TBlobStorageIngress::IngressCreateFromRepl [GOOD] >> TBlobStorageCompStrat::Test1 >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] >> TBlobStorageIngress::IngressGetMainReplica [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndToken [GOOD] >> PgTest::DumpStringCells >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSid [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndNoToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndEmptyToken [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndEmptyToken [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> PgTest::DumpStringCells [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::FailOnListAndNoToken [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndEmptyToken [GOOD] >> TBlobStorageHullFresh::SimpleForward [GOOD] >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressGetMainReplica [GOOD] >> TBlobStorageCompStrat::Test1 [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndToken [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnEmptyOwnerAndEmptyToken [GOOD] |88.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells [GOOD] |88.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp >> TBlobStorageHullFresh::Perf [GOOD] >> THugeMigration::RollbackMap_HugeBlobs [GOOD] >> TMonitoring::ReregisterTest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] >> TCowBTreeTest::SnapshotRollback [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase >> TMonitoring::ReregisterTest [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear |88.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |88.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 [GOOD] |88.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] >> TLogPriorityMuteTests::MuteUntilTest [GOOD] >> TLogPriorityMuteTests::AtomicMuteUntilTest [GOOD] >> TLogPriorityMuteTests::AtomicUnmuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteDurationTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteDurationTest [GOOD] >> RuntimeFeatureFlags::DefaultValues >> TValue::FieldsCount [GOOD] >> TValue::IsNull [GOOD] >> RuntimeFeatureFlags::DefaultValues [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::Perf [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TMonitoring::ReregisterTest [GOOD] >> RuntimeFeatureFlags::ConversionToProto [GOOD] >> RuntimeFeatureFlags::ConversionFromProto [GOOD] >> RuntimeFeatureFlags::UpdatingRuntimeFlags [GOOD] |88.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCowBTreeTest::SnapshotRollbackEarlyErase [GOOD] >> TEventPriorityQueueTest::TestPriority [GOOD] >> TFastTlsTest::IterationAfterThreadDeath |88.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteDurationTest [GOOD] |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/public/lib/value/ut/gtest >> TValue::IsNull [GOOD] >> TFastTlsTest::IterationAfterThreadDeath [GOOD] >> TFastTlsTest::ManyThreadLocals [GOOD] >> TFastTlsTest::ManyConcurrentKeys |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/base/generated/ut/unittest >> RuntimeFeatureFlags::UpdatingRuntimeFlags [GOOD] |88.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TFastTlsTest::ManyConcurrentKeys [GOOD] >> TFifoQueueTest::ShouldPushPop [GOOD] >> TFragmentedBufferTest::TestWriteRead [GOOD] >> TFragmentedBufferTest::TestOverwriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead2 [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead3 [GOOD] >> TFragmentedBufferTest::Test3WriteRead [GOOD] >> TFragmentedBufferTest::Test5WriteRead [GOOD] >> TFragmentedBufferTest::TestIsNotMonolith [GOOD] >> TFragmentedBufferTest::TestGetMonolith [GOOD] >> TFragmentedBufferTest::TestSetMonolith [GOOD] >> TFragmentedBufferTest::TestReplaceWithSetMonolith [GOOD] >> TFragmentedBufferTest::CopyFrom [GOOD] >> TFragmentedBufferTest::ReadWriteRandom |88.3%| [TS] {RESULT} ydb/public/lib/value/ut/gtest >> TCowBTreeTest::MultipleSnapshotsWithClear [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc >> Json::BasicRendering [GOOD] |88.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.3%| [TS] {RESULT} ydb/core/base/generated/ut/unittest |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/json/ut/unittest >> Json::BasicRendering [GOOD] |88.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp >> TBsOther1::PoisonPill [GOOD] >> TBsOther1::ChaoticParallelWrite >> TFragmentedBufferTest::ReadWriteRandom [GOOD] >> THazardTest::CachedPointers [GOOD] >> THazardTest::AutoProtectedPointers [GOOD] >> THyperLogCounterTest::TestGetSet [GOOD] >> THyperLogCounterTest::TestAddRandom |88.3%| [TS] {RESULT} ydb/core/viewer/json/ut/unittest |88.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc [GOOD] >> TCowBTreeTest::DuplicateKeysInplace >> THyperLogCounterTest::TestAddRandom [GOOD] >> THyperLogCounterTest::TestAddFixed [GOOD] |88.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCowBTreeTest::DuplicateKeysInplace [GOOD] >> TCowBTreeTest::DuplicateKeysThreadSafe >> AuthDatabaseAdmin::PassOnOwnerMatchGroupSid [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSid [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] |88.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] |88.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> THyperLogCounterTest::TestAddFixed [GOOD] |88.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |88.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/export/session/session.cpp >> TCowBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TCowBTreeTest::IteratorDestructor [GOOD] >> TCowBTreeTest::ShouldCallDtorsInplace |88.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] |88.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp >> TCowBTreeTest::ShouldCallDtorsInplace [GOOD] >> TCowBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TCowBTreeTest::Concurrent |88.3%| [TA] $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |88.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |88.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/session.cpp >> TCowBTreeTest::Concurrent [GOOD] >> TCowBTreeTest::Alignment [GOOD] |88.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a >> TContinueFromStreamingOffsetsPlanTest::MultipleTopics >> TContinueFromStreamingOffsetsPlanTest::Empty [GOOD] >> TContinueFromStreamingOffsetsPlanTest::OneToOneMapping [GOOD] >> TContinueFromStreamingOffsetsPlanTest::DifferentPartitioning [GOOD] >> AttributesMD5Test::AmazonSampleWithBinary [GOOD] >> AttributesMD5Test::AmazonSampleWithString [GOOD] >> TContinueFromStreamingOffsetsPlanTest::MultipleTopics [GOOD] >> TContinueFromStreamingOffsetsPlanTest::AllTopicsMustBeUsedInNonForceMode [GOOD] >> TContinueFromStreamingOffsetsPlanTest::NotMappedAllPartitions [GOOD] >> TContinueFromStreamingOffsetsPlanTest::ReadPartitionInSeveralPlacesIsOk [GOOD] >> TContinueFromStreamingOffsetsPlanTest::MapSeveralReadingsToOneIsAllowedOnlyInForceMode [GOOD] |88.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {RESULT} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/attributes/ut/unittest >> AttributesMD5Test::AmazonSampleWithString [GOOD] |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/dq/state/ut/unittest >> TContinueFromStreamingOffsetsPlanTest::MapSeveralReadingsToOneIsAllowedOnlyInForceMode [GOOD] >> TMaxTracker::two_threads_collect_update_ops_per_sec [GOOD] >> TNumericalMaybeTest::Types [GOOD] >> TNumericalMaybeTest::Constructor [GOOD] >> TNumericalMaybeTest::AssignmentUnsetValue [GOOD] >> TNumericalMaybeTest::AssignmentSetValue [GOOD] >> TNumericalMaybeTest::ComparisonUnsetValue [GOOD] >> TNumericalMaybeTest::ComparisonSetValue [GOOD] >> TNumericalMaybeTest::Clear [GOOD] >> TNumericalMaybeTest::StreamOutput [GOOD] >> TOneOneQueueTests::TestSimpleEnqueueDequeue [GOOD] >> TOneOneQueueTests::CleanInDestructor [GOOD] >> TOneOneQueueTests::DeleteInDestructor [GOOD] >> TOneOneQueueTests::ReadIterator [GOOD] >> TPageMapTest::TestResize [GOOD] >> TPageMapTest::TestRandom >> OldFormat::SameVersion [GOOD] >> OldFormat::DefaultRules [GOOD] >> OldFormat::PrevYear [GOOD] >> OldFormat::Trunk [GOOD] >> OldFormat::UnexpectedTrunk [GOOD] >> OldFormat::TooOld [GOOD] >> OldFormat::OldNbs [GOOD] >> VersionParser::Basic [GOOD] >> YdbVersion::DefaultSameVersion [GOOD] >> YdbVersion::DefaultPrevMajor [GOOD] >> YdbVersion::DefaultNextMajor [GOOD] >> YdbVersion::DefaultHotfix [GOOD] >> YdbVersion::DefaultCompatible [GOOD] >> YdbVersion::DefaultNextYear [GOOD] >> YdbVersion::DefaultPrevYear [GOOD] >> YdbVersion::DefaultNewMajor [GOOD] >> YdbVersion::DefaultOldMajor [GOOD] >> YdbVersion::DefaultDifferentBuild [GOOD] >> YdbVersion::DefaultDifferentBuildIncompatible [GOOD] >> YdbVersion::DefaulPatchTag [GOOD] >> YdbVersion::LimitOld [GOOD] >> YdbVersion::LimitNew [GOOD] >> YdbVersion::CurrentCanLoadFrom [GOOD] >> YdbVersion::CurrentCanLoadFromAllOlder [GOOD] >> YdbVersion::CurrentCanLoadFromIncompatible [GOOD] >> YdbVersion::CurrentStoresReadableBy [GOOD] >> YdbVersion::StoredReadableBy [GOOD] >> YdbVersion::StoredReadableByIncompatible [GOOD] >> YdbVersion::StoredWithRules [GOOD] >> 
YdbVersion::StoredWithRulesIncompatible [GOOD] >> YdbVersion::OldNbsStored [GOOD] >> YdbVersion::OldNbsIncompatibleStored [GOOD] >> YdbVersion::NewNbsCurrent [GOOD] >> YdbVersion::NewNbsIncompatibleCurrent [GOOD] >> YdbVersion::OneAcceptedVersion [GOOD] >> YdbVersion::ForbiddenMinor [GOOD] >> YdbVersion::DefaultRulesWithExtraForbidden [GOOD] >> YdbVersion::ExtraAndForbidden [GOOD] >> YdbVersion::SomeRulesAndOtherForbidden [GOOD] >> YdbVersion::Component [GOOD] >> YdbVersion::OtherComponent [GOOD] >> YdbVersion::YDBAndNbs [GOOD] >> YdbVersion::DifferentYdbVersionsWithNBSRules [GOOD] >> YdbVersion::WithPatchAndWithoutPatch [GOOD] >> YdbVersion::AcceptSpecificHotfixWithoutPatch [GOOD] >> YdbVersion::TrunkYDBAndNbs [GOOD] >> YdbVersion::TrunkAndStable [GOOD] >> YdbVersion::CompatibleWithSelf [GOOD] >> YdbVersion::PrintCurrentVersionProto [GOOD] >> TPartitionKeyRangeSequenceTest::ValidSinglePartition [GOOD] >> TPartitionKeyRangeSequenceTest::ValidSequence [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidOverlapLong [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidContains [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidOverlap [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidFullCoverHi [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidOrder [GOOD] >> TPartitionKeyRangeSequenceTest::ValidFivePartitions [GOOD] >> TPartitionKeyRangeSequenceTest::EmptyInput [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidGap [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidFullCoverLo [GOOD] |88.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCowBTreeTest::Alignment [GOOD] Test command err: Producer 0 worked for 0.1676698186 seconds Producer 1 worked for 0.1531252749 seconds Consumer 0 worked for 0.09319193537 seconds on a snapshot of size 20000 Consumer 1 worked for 0.1706115678 seconds on a snapshot of size 40000 Consumer 2 worked for 0.2364028093 seconds on a snapshot of size 60000 Consumer 3 worked for 0.2443584571 seconds on a snapshot of size 80000 Consumers had 1199992 successful seeks |88.3%| [TS] {RESULT} ydb/core/ymq/attributes/ut/unittest |88.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |88.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/partition_key_range/ut/unittest >> TPartitionKeyRangeSequenceTest::InvalidFullCoverLo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/driver_lib/version/ut/unittest >> YdbVersion::PrintCurrentVersionProto [GOOD] Test command err: Application: "ydb" >> TBsVDiskManyPutGet::ManyMultiSinglePutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet |88.3%| [TS] {RESULT} ydb/library/yql/dq/state/ut/unittest |88.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a >> BlobTest::Flags_HasPartData [GOOD] >> BlobTest::Flags_HasKinesisData [GOOD] >> BlobTest::Flags_HasCreateTimestamp [GOOD] >> BlobTest::Flags_HasUncompressedSize [GOOD] >> ClientBlobSerialization::EmptyPayload [GOOD] >> BlobTest::Flags_HasWriteTimestamp [GOOD] >> ClientBlobSerialization::SerializeAndDeserializeAllScenarios |88.3%| [TA] $(B)/ydb/core/kqp/compile_service/helpers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ConsoleDumper::Basic [GOOD] >> ConsoleDumper::CoupleMerge |88.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a >> ConsoleDumper::CoupleMerge [GOOD] >> ConsoleDumper::CoupleOverwrite [GOOD] >> ConsoleDumper::CoupleMergeOverwriteRepeated [GOOD] >> ConsoleDumper::ReverseMerge [GOOD] >> ConsoleDumper::ReverseOverwrite [GOOD] >> ConsoleDumper::ReverseMergeOverwriteRepeated [GOOD] >> ConsoleDumper::Different [GOOD] >> ConsoleDumper::SimpleNode [GOOD] >> ConsoleDumper::JoinSimilar [GOOD] >> ConsoleDumper::DontJoinDifferent [GOOD] >> ConsoleDumper::SimpleTenant [GOOD] >> ConsoleDumper::SimpleNodeTenant [GOOD] >> ConsoleDumper::SimpleHostId [GOOD] >> ConsoleDumper::SimpleNodeId [GOOD] >> ConsoleDumper::DontJoinNodeTenant >> ConsoleDumper::DontJoinNodeTenant [GOOD] >> ConsoleDumper::JoinMultipleSimple [GOOD] >> ConsoleDumper::MergeNode [GOOD] >> ConsoleDumper::MergeOverwriteRepeatedNode [GOOD] >> ConsoleDumper::Ordering [GOOD] >> ConsoleDumper::IgnoreUnmanagedItems [GOOD] >> IncompatibilityRules::BasicPatternMatching [GOOD] >> IncompatibilityRules::EmptyLabelMatching [GOOD] >> IncompatibilityRules::UnsetLabelMatching [GOOD] >> IncompatibilityRules::AddAndRemoveRules [GOOD] >> IncompatibilityRules::RuleOverride [GOOD] >> IncompatibilityRules::SimpleIncompatibilityCheck [GOOD] >> IncompatibilityRules::DisableRules [GOOD] >> IncompatibilityRules::MergeRules [GOOD] >> IncompatibilityRules::ParseEmptyOverrides [GOOD] >> IncompatibilityRules::ParseDisableRules [GOOD] >> IncompatibilityRules::ParseCustomRules [GOOD] >> IncompatibilityRules::ParseUnsetMarker [GOOD] >> IncompatibilityRules::ParseEmptyMarker [GOOD] >> IncompatibilityRules::DeterministicOrdering [GOOD] >> IncompatibilityRules::ComplexMultiPatternRule [GOOD] >> IncompatibilityRules::IntegrationWithResolveAll [GOOD] >> IncompatibilityRules::CheckLabelsMapCompatibility [GOOD] >> IncompatibilityRules::CheckLabelsMapWithUnsetMarker [GOOD] >> IncompatibilityRules::ValueInOperator [GOOD] >> IncompatibilityRules::ValueInMultipleValues [GOOD] >> IncompatibilityRules::NegatedFlag [GOOD] >> IncompatibilityRules::NegatedWithValueIn [GOOD] >> IncompatibilityRules::ComplexRuleWithNegation [GOOD] >> IncompatibilityRules::MonostateMatchesAnyValue [GOOD] >> IncompatibilityRules::RealWorldScenario_DynamicNodesValidation [GOOD] >> IncompatibilityRules::ComplexValueInNegationCombination [GOOD] >> IncompatibilityRules::EdgeCase_EmptyValueIn [GOOD] >> IncompatibilityRules::EdgeCase_MultipleUnsetEmptyInValueIn [GOOD] >> IncompatibilityRules::BuiltInRules_RequiredLabels [GOOD] >> IncompatibilityRules::BuiltInRules_StaticNodes |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/helpers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> IncompatibilityRules::BuiltInRules_StaticNodes [GOOD] >> IncompatibilityRules::BuiltInRules_DynamicNodes [GOOD] >> IncompatibilityRules::BuiltInRules_CloudNodes [GOOD] >> IncompatibilityRules::BuiltInRules_SlotNodes [GOOD] >> IncompatibilityRules::BuiltInRules_DisableSpecificRules [GOOD] >> IncompatibilityRules::BuiltInRules_DisableAll [GOOD] >> YamlConfig::CollectLabels [GOOD] >> YamlConfig::MaterializeSpecificConfig [GOOD] >> YamlConfig::MaterializeAllConfigSimple [GOOD] >> YamlConfig::MaterializeAllConfigs >> JsonEnvelopeTest::Simple [GOOD] >> JsonEnvelopeTest::ArrayItem [GOOD] >> JsonEnvelopeTest::BinaryData [GOOD] >> JsonEnvelopeTest::NoReplace [GOOD] >> JsonEnvelopeTest::Escape [GOOD] |88.4%| [TA] {RESULT} $(B)/ydb/core/kqp/compile_service/helpers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TS] {RESULT} ydb/core/persqueue/public/partition_key_range/ut/unittest >> YamlConfig::MaterializeAllConfigs [GOOD] >> YamlConfig::AppendVolatileConfig [GOOD] >> YamlConfig::AppendAndResolve [GOOD] >> YamlConfig::GetMetadata [GOOD] >> YamlConfig::ReplaceMetadata [GOOD] >> YamlConfig::FuseConfigs_ConsoleWins [GOOD] >> YamlConfig::FuseConfigs_BaseFillsGaps [GOOD] >> YamlConfig::FuseConfigs_EmptyBase [GOOD] >> YamlConfig::FuseConfigs_EmptyConsoleConfig [GOOD] >> YamlConfig::FuseConfigs_ExcludesStorageOnlyKeys [GOOD] >> YamlConfigParser::Iterate [GOOD] >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 [GOOD] >> YamlConfigParser::PdiskCategoryFromString [GOOD] >> YamlConfigParser::AllowDefaultHostConfigId [GOOD] >> YamlConfigParser::IncorrectHostConfigIdFails [GOOD] >> YamlConfigParser::NoMixedHostConfigIds [GOOD] >> YamlConfigProto2Yaml::StorageConfig [GOOD] >> YamlConfigResolveUnique::NotUniqueSelectors [GOOD] >> YamlConfigResolveUnique::AllTestConfigs >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased >> YamlConfigResolveUnique::AllTestConfigs [GOOD] |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/log_backend/ut/unittest >> JsonEnvelopeTest::Escape [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> ClientBlobSerialization::SerializeAndDeserializeAllScenarios [GOOD] >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] >> TMemoryPoolTest::LongRollback >> TMemoryPoolTest::AllocOneByte [GOOD] >> TMemoryPoolTest::TransactionsWithAlignment [GOOD] >> TMemoryPoolTest::AppendString [GOOD] >> TMemoryPoolTest::Transactions [GOOD] >> TMemoryPoolTest::LongRollback [GOOD] >> UtilString::ShrinkToFit [GOOD] >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |88.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |88.4%| [TS] {RESULT} ydb/core/log_backend/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/blob/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: payloadSize 0 totalSize 100 partsCount 100 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 
partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === 
payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 10 totalSize 
100 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 
partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount ... Size 1048576 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 524288 totalSize 
10485760 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 
hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === 
payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === Size: 128 Create chunk: 0.000021s Read by index: 0.000008s Iterate: 0.000009s Size: 252 Create chunk: 0.000023s Read by index: 0.000010s Iterate: 0.000010s Size: 8002 Create chunk: 0.000114s Read by index: 0.000014s Iterate: 0.000050s Size: 8256 Create chunk: 0.000092s Read by index: 0.000019s Iterate: 0.000076s Size: 8532 Create chunk: 0.000088s Read by index: 0.000075s Iterate: 0.000066s Size: 7769 Create chunk: 0.000128s Read by index: 0.000050s Iterate: 0.000044s Size: 2853 Create chunk: 0.000057s Read by index: 0.000055s Iterate: 0.000025s Size: 2419 Create chunk: 0.000053s Read by index: 0.000061s Iterate: 0.000027s Size: 2929 Create chunk: 0.000071s Read by index: 0.000072s Iterate: 0.000030s Size: 2472 Create chunk: 0.000070s Read by index: 0.000062s Iterate: 0.000027s Size: 1887 Create chunk: 0.000098s Read by index: 0.000088s Iterate: 0.000046s Size: 1658 Create chunk: 0.000058s Read by index: 0.000058s Iterate: 0.000032s Size: 1889 Create chunk: 0.000049s Read by index: 0.000053s Iterate: 0.000028s Size: 1660 Create chunk: 0.000046s Read by index: 0.000056s Iterate: 0.000028s Size: 2407 Create chunk: 0.000071s Read by index: 0.000093s Iterate: 0.000050s Size: 2061 Create chunk: 0.000076s Read by index: 0.000117s Iterate: 0.000046s |88.4%| [TA] $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_util/unittest >> UtilString::ShrinkToFit [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut/unittest >> YamlConfigResolveUnique::AllTestConfigs [GOOD] Test command err: host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" "\/dev\/disk\/by-partlabel\/kikimr_nvme_02" host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" host_configs: - host_config_id: 1 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: NVME expected_slot_count: 9 - path: /dev/disk/by-partlabel/kikimr_nvme_02 type: NVME expected_slot_count: 9 - host_config_id: 2 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: SSD expected_slot_count: 9 hosts: - host: sas8-6954.search.yandex.net port: 19000 host_config_id: 1 - host: sas8-6955.search.yandex.net port: 19000 host_config_id: 2 item_config_generation: 0 |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |88.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |88.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp >> TPGTest::TestLogin [GOOD] |88.4%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test |88.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |88.4%| [TM] {RESULT} ydb/core/tablet_flat/ut_util/unittest |88.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |88.4%| [TS] {RESULT} ydb/core/persqueue/pqtablet/blob/ut/unittest |88.4%| [TM] {RESULT} ydb/core/driver_lib/version/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/pgproxy/ut/unittest >> TPGTest::TestLogin [GOOD] Test command err: 2026-02-19T08:04:00.768753Z :PGWIRE INFO: sock_listener.cpp:66: Listening on [::]:62849 2026-02-19T08:04:00.769628Z :PGWIRE DEBUG: pg_connection.cpp:61: (#13,[::1]:34136) incoming connection opened 2026-02-19T08:04:00.769822Z :PGWIRE DEBUG: pg_connection.cpp:241: (#13,[::1]:34136) -> [1] 'i' "Initial" Size(15) protocol(0x00000300) user=user 2026-02-19T08:04:00.769991Z :PGWIRE DEBUG: pg_connection.cpp:241: (#13,[::1]:34136) <- [1] 'R' "Auth" Size(4) OK |88.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp >> TFunctionsMetadataTest::Serialization |88.4%| [TS] {RESULT} ydb/core/pgproxy/ut/unittest |88.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp >> PtrTest::Test1 [GOOD] >> BufferWithGaps::Basic [GOOD] >> TBatchedVecTest::TestOutputTOutputType [GOOD] >> TBatchedVecTest::TestToStringInt [GOOD] >> BufferWithGaps::IsReadable [GOOD] >> TFunctionsMetadataTest::Serialization [GOOD] >> 
TPageMapTest::TestRandom [GOOD] >> TPageMapTest::TestIntrusive [GOOD] >> TPageMapTest::TestSimplePointer [GOOD] >> TPageMapTest::TestSharedPointer [GOOD] >> TPageMapTest::TestSimplePointerFull |88.4%| [TA] {RESULT} $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... results_accumulator.log} >> TPageMapTest::TestSimplePointerFull [GOOD] >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriority [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriorityWithRemove [GOOD] >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] >> TTimeGridTest::TimeGrid [GOOD] >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> ExternalDataSourceTest::ValidateName [GOOD] >> ExternalDataSourceTest::ValidatePack [GOOD] >> ExternalDataSourceTest::ValidateAuth [GOOD] >> ExternalDataSourceTest::ValidateParameters [GOOD] >> ExternalDataSourceTest::ValidateHasExternalTable [GOOD] >> ExternalDataSourceTest::ValidateProperties [GOOD] >> ExternalDataSourceTest::ValidateLocation [GOOD] >> ExternalSourceBuilderTest::ValidateName [GOOD] >> ExternalSourceBuilderTest::ValidateAuthWithoutCondition [GOOD] >> ExternalSourceBuilderTest::ValidateAuthWithCondition [GOOD] >> ExternalSourceBuilderTest::ValidateUnsupportedField [GOOD] >> ExternalSourceBuilderTest::ValidateNonRequiredField [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredField [GOOD] >> ExternalSourceBuilderTest::ValidateNonRequiredFieldValues [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredFieldValues [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredFieldOnCondition [GOOD] >> IcebergDdlTest::HiveCatalogWithS3Test [GOOD] >> IcebergDdlTest::HadoopCatalogWithS3Test [GOOD] >> ObjectStorageTest::SuccessValidation [GOOD] >> ObjectStorageTest::FailedCreate [GOOD] >> ObjectStorageTest::FailedValidation [GOOD] >> ObjectStorageTest::FailedJsonListValidation [GOOD] >> ObjectStorageTest::FailedOptionalTypeValidation [GOOD] >> ObjectStorageTest::WildcardsValidation [GOOD] >> ObjectStorageTest::FailedPartitionedByValidation >> ObjectStorageTest::FailedPartitionedByValidation [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/metadata/ut/unittest >> TFunctionsMetadataTest::Serialization [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/base/ut/gtest >> BufferWithGaps::IsReadable [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/metering/ut/unittest >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> ConfigProto::ForbidNewRequired |88.4%| [TS] {RESULT} ydb/core/client/metadata/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/external_sources/ut/unittest >> ObjectStorageTest::FailedPartitionedByValidation [GOOD] |88.4%| [TS] {RESULT} ydb/core/metering/ut/unittest |88.4%| [TS] {RESULT} ydb/core/blobstorage/base/ut/gtest >> TCompressorTests::WrongMagicLz4 [GOOD] >> TCompressorTests::ErrorLz4 >> TCompressorTests::SuccessLz4 [GOOD] >> ConfigValidation::SameStaticGroup [GOOD] >> ConfigValidation::StaticGroupSizesGrow [GOOD] >> ConfigValidation::StaticGroupSizesShrink [GOOD] >> ConfigValidation::VDiskChanged [GOOD] >> ConfigValidation::TooManyVDiskChanged [GOOD] >> DatabaseConfigValidation::AllowedFields >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] >> LongTxServicePublicTypes::Snapshot [GOOD] >> LongTxServicePublicTypes::SnapshotMaxTxId 
[GOOD] >> LongTxServicePublicTypes::LongTxId [GOOD] >> Init::TWithDefaultParser [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorForActorSystem >> TCompressorTests::ErrorLz4 [GOOD] >> TOutputQueueTests::TestOutputQueueBasic [GOOD] >> TOutputQueueTests::TestOutputQueueWithMaxItemSize [GOOD] >> TOutputQueueTests::TestOutputQueueWithMinItemSize [GOOD] >> TOutputQueueTests::TestOutputQueueWithMinMaxItemSize [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/public/ut/unittest >> DatabaseConfigValidation::AllowedFields [GOOD] >> DatabaseConfigValidation::NotAllowedFields [GOOD] >> MonitoringConfigValidation::RequireCountersAuthentication [GOOD] >> MonitoringConfigValidation::RequireHealthcheckAuthentication [GOOD] >> StateStorageConfigValidation::Empty [GOOD] >> StateStorageConfigValidation::Good [GOOD] >> StateStorageConfigValidation::NToSelect [GOOD] >> StateStorageConfigValidation::WriteOnly [GOOD] >> StateStorageConfigValidation::Disabled [GOOD] >> StateStorageConfigValidation::DisabledGood [GOOD] >> StateStorageConfigValidation::CanDisableAndChange [GOOD] >> StateStorageConfigValidation::CanChangeDisabled [GOOD] >> StateStorageConfigValidation::ChangesNotAllowed [GOOD] >> StateStorageConfigValidation::ValidateConfigSelfManagement [GOOD] >> StateStorageConfigValidation::ValidateConfigDomainEmpty [GOOD] >> StateStorageConfigValidation::ValidateConfigSSId [GOOD] >> StateStorageConfigValidation::ValidateConfigBad [GOOD] >> StateStorageConfigValidation::ValidateConfigValidatesStateStorage [GOOD] >> StateStorageConfigValidation::ValidateConfigGood [GOOD] >> StateStorageConfigValidation::ValidateConfigExplicitGood [GOOD] >> StateStorageConfigValidation::ValidateConfigExplicitBad [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorForActorSystem [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorWithAnotherLabel [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorInheritance [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeId >> DoubleIndexedTests::TestErase [GOOD] >> DoubleIndexedTests::TestFind [GOOD] >> DoubleIndexedTests::TestMerge [GOOD] >> DoubleIndexedTests::TestUpsertBySingleKey [GOOD] >> DoubleIndexedTests::TestUpsertByBothKeys [GOOD] >> ColumnShardConfigValidation::AcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::NotAcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::CorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::CorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::NotCorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::CorrectZSTDCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] |88.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeId [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeHost [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeKind [GOOD] >> XdsBootstrapConfig::CanSetHostnameAndDataCenterFromYdbNode >> XdsBootstrapConfig::CanSetHostnameAndDataCenterFromYdbNode [GOOD] >> XdsBootstrapConfig::CanSetDataCenterFromYdbNodeArgument [GOOD] >> XdsBootstrapConfig::CanCheckThatXdsBootstrapIsAbsent [GOOD] >> XdsBootstrapConfig::CanUseNodeIdFromYamlConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] Test command err: Collect ops/sec: 19672087.77 |88.4%| [TS] {RESULT} ydb/core/tx/sequenceshard/public/ut/unittest |88.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> XdsBootstrapConfig::CanUseNodeIdFromYamlConfig [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/compressors/ut/unittest >> TOutputQueueTests::TestOutputQueueWithMinMaxItemSize [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |88.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize |88.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |88.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/public/ut/unittest >> LongTxServicePublicTypes::LongTxId [GOOD] |88.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |88.4%| [TS] {RESULT} ydb/core/config/ut/unittest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] |88.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp >> MdbEndpoingGenerator::Legacy [GOOD] >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/ut/unittest >> StateStorageConfigValidation::ValidateConfigExplicitBad [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/column_shard_config_validator_ut/unittest >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_double_indexed/unittest >> DoubleIndexedTests::TestUpsertByBothKeys [GOOD] |88.4%| [TS] {RESULT} ydb/library/yaml_config/ut/unittest |88.4%| [TS] {RESULT} ydb/library/yql/providers/s3/compressors/ut/unittest >> TMicrosecondsSlidingWindow::Basic [GOOD] |88.4%| [TS] {RESULT} ydb/core/external_sources/ut/unittest |88.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/init/ut/unittest >> XdsBootstrapConfig::CanUseNodeIdFromYamlConfig [GOOD] |88.4%| [TS] {RESULT} ydb/core/tx/long_tx_service/public/ut/unittest >> Mirror3of4::ReplicationSmall |88.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] >> EncryptedFileSerializerTest::WrongParametersForSerializer [GOOD] >> EncryptedFileSerializerTest::SerializeWholeFileAtATime [GOOD] >> EncryptedFileSerializerTest::WrongParametersForDeserializer >> EncryptedFileSerializerTest::WrongParametersForDeserializer [GOOD] >> EncryptedFileSerializerTest::SplitOnBlocks |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] |88.4%| [TS] {RESULT} ydb/core/tx/scheme_board/ut_double_indexed/unittest |88.4%| [TS] {RESULT} ydb/core/config/validation/column_shard_config_validator_ut/unittest |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/common/ut/unittest >> TMicrosecondsSlidingWindow::Basic [GOOD] >> EncryptedFileSerializerTest::SplitOnBlocks [GOOD] >> EncryptedFileSerializerTest::EmptyFile [GOOD] >> EncryptedFileSerializerTest::ReadPartial [GOOD] >> EncryptedFileSerializerTest::DeleteLastByte [GOOD] >> EncryptedFileSerializerTest::AddByte [GOOD] >> EncryptedFileSerializerTest::RemoveLastBlock [GOOD] >> EncryptedFileSerializerTest::ChangeAnyByte [GOOD] >> EncryptedFileSerializerTest::BigHeaderSize [GOOD] >> EncryptedFileSerializerTest::BigBlockSize [GOOD] >> EncryptedFileSerializerTest::RestoreFromState [GOOD] >> EncryptedFileSerializerTest::IVSerialization [GOOD] >> PathsNormalizationTest::NormalizeItemPath [GOOD] >> PathsNormalizationTest::NormalizeItemPrefix [GOOD] >> PathsNormalizationTest::NormalizeExportPrefix [GOOD] |88.5%| [TS] {RESULT} ydb/core/config/init/ut/unittest |88.5%| [TS] {RESULT} ydb/core/persqueue/common/ut/unittest |88.5%| [TS] {RESULT} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> Mvp::OpenIdConnectRequestWithIamTokenYandex [GOOD] >> Mvp::OpenIdConnectRequestWithIamTokenNebius [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieYandex [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius [GOOD] >> Mvp::OpenIdConnectProxyOnHttpsHost [GOOD] >> Mvp::OpenIdConnectFixLocationHeader |88.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |88.5%| [TS] {RESULT} ydb/core/config/validation/ut/unittest >> Mvp::OpenIdConnectFixLocationHeader [GOOD] >> Mvp::OpenIdConnectExchangeNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlow >> ArrowTest::BatchBuilder |88.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a >> Mvp::OpenIdConnectFullAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail >> ArrowTest::BatchBuilder [GOOD] >> ArrowTest::BatchBuilder_Decimal [GOOD] >> ArrowTest::BatchBuilder_Uuid [GOOD] >> ArrowTest::ArrowToYdbConverter >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail [GOOD] |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/backup/common/ut/unittest >> PathsNormalizationTest::NormalizeExportPrefix [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid 
[GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed >> ArrowTest::ArrowToYdbConverter [GOOD] >> ArrowTest::ArrowToYdbConverter_Decimal [GOOD] >> ArrowTest::ArrowToYdbConverter_Uuid [GOOD] >> ArrowTest::SortWithCompositeKey [GOOD] >> ArrowTest::MergingSortedInputStream [GOOD] >> ArrowTest::MergingSortedInputStreamReversed [GOOD] >> ArrowTest::MergingSortedInputStreamReplace [GOOD] >> ArrowTest::MaxVersionFilter [GOOD] >> ArrowTest::EqualKeysVersionFilter [GOOD] >> ColumnFilter::MergeFilters [GOOD] >> ColumnFilter::CombineFilters [GOOD] >> ColumnFilter::ApplyFilterToFilter [GOOD] >> ColumnFilter::FilterSlice [GOOD] >> ColumnFilter::FilterCheckSlice [GOOD] >> ColumnFilter::FilterSlice1 [GOOD] >> ColumnFilter::CutFilter1 [GOOD] >> ColumnFilter::CutFilter2 [GOOD] >> Dictionary::Simple >> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed [GOOD] >> Mvp::OpenIdConnectAllowedHostsList [GOOD] >> Mvp::OpenIdConnectHandleNullResponseFromProtectedResource [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD] >> Mvp::OidcImpersonationStartFlow [GOOD] >> Mvp::OidcImpersonationStartNeedServiceAccountId [GOOD] >> Mvp::OidcImpersonationStopFlow [GOOD] >> Mvp::OidcImpersonatedAccessToProtectedResource >> Mvp::OidcImpersonatedAccessToProtectedResource [GOOD] >> Mvp::OidcImpersonatedAccessNotAuthorized [GOOD] >> Mvp::OpenIdConnectStreamingRequestResponseYandex [GOOD] >> Mvp::OpenIdConnectStreamingRequestResponseNebius [GOOD] >> Mvp::OidcWhoami200 >> Backpressure::MonteCarlo >> Mvp::OidcWhoami200 [GOOD] >> Mvp::OidcWhoamiServiceAccount200 [GOOD] >> Mvp::OidcWhoamiBadIam200 [GOOD] >> Mvp::OidcWhoamiBadYdb200 |88.5%| [TS] {RESULT} ydb/core/backup/common/ut/unittest |88.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a >> Mvp::OidcWhoamiBadYdb200 [GOOD] >> Mvp::OidcWhoamiBadYdbServiceAccount200 [GOOD] >> Mvp::OidcWhoamiNoInfo500 [GOOD] >> Mvp::OidcWhoamiForward307 [GOOD] >> Mvp::OidcYandexIgnoresWhoamiExtension >> FormatCSV::Instants [GOOD] >> FormatCSV::EmptyData [GOOD] >> FormatCSV::Common >> Mvp::OidcYandexIgnoresWhoamiExtension [GOOD] >> Mvp::GetAddressWithoutPort [GOOD] >> HttpProxy::BasicParsing [GOOD] >> HttpProxy::HeaderParsingError_Request [GOOD] >> HttpProxy::HeaderParsingError_Response [GOOD] >> HttpProxy::GetWithSpecifiedContentType [GOOD] >> HttpProxy::BasicParsingChunkedBodyRequest [GOOD] >> HttpProxy::BasicParsingChunkedBigBodyRequest [GOOD] >> HttpProxy::BasicParsingBigHeadersRequest [GOOD] >> HttpProxy::BrokenParsingMethod [GOOD] >> HttpProxy::BrokenParsingChunkedBodyRequest [GOOD] >> HttpProxy::BasicPost [GOOD] >> HttpProxy::BasicParsingChunkedBodyResponse [GOOD] >> HttpProxy::InvalidParsingChunkedBody [GOOD] >> HttpProxy::AdvancedParsingChunkedBody [GOOD] >> HttpProxy::CreateCompressedResponse [GOOD] >> HttpProxy::BasicPartialParsing [GOOD] >> HttpProxy::BasicPartialParsingChunkedBody [GOOD] >> HttpProxy::BasicParsingContentLength0 [GOOD] >> HttpProxy::AdvancedParsing [GOOD] >> HttpProxy::AdvancedPartialParsing [GOOD] >> HttpProxy::BasicRenderBodyWithHeadersAndCookies [GOOD] |88.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a >> HttpProxy::BasicRenderOutgoingResponse >> FormatCSV::Common [GOOD] >> FormatCSV::Strings 
[GOOD] >> FormatCSV::Nulls [GOOD] >> HttpProxy::BasicRenderOutgoingResponse [GOOD] >> HttpProxy::BasicRunning4 [GOOD] >> HttpProxy::BasicRunning6 >> TImportantConsumerOffsetTrackerTest::EmptyConsumersList [GOOD] >> TImportantConsumerOffsetTrackerTest::Unbound [GOOD] >> TImportantConsumerOffsetTrackerTest::Expired [GOOD] >> TImportantConsumerOffsetTrackerTest::UnboundAndExpired [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndExpired [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting300 [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting400 [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting500 [GOOD] >> TImportantConsumerOffsetTrackerTest::Waiting400And500 [GOOD] >> TImportantConsumerOffsetTrackerTest::Waiting400AndRead500 [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchSingleConsumerMaxRetention [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchSingleConsumerFiniteRetention [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchMultipleConsumers [GOOD] >> TImportantConsumerOffsetTrackerTest::MultipleExactMatches [GOOD] >> TImportantConsumerOffsetTrackerTest::UnboundKeepNextBlob [GOOD] >> TDeduplicatorTest::AddMessage [GOOD] >> TDeduplicatorTest::AddTwoMessages [GOOD] >> TDeduplicatorTest::AddDeduplicatedMessages [GOOD] >> TDeduplicatorTest::AddTwoMessages_DifferentTime_OneBucket [GOOD] >> TDeduplicatorTest::AddManyMessages_SameTime_DifferentBucket [GOOD] >> HttpProxy::BasicRunning6 [GOOD] >> HttpProxy::TlsRunning >> AuthConfigValidation::AcceptValidPasswordComplexity [GOOD] >> AuthConfigValidation::CannotAcceptInvalidPasswordComplexity [GOOD] >> AuthConfigValidation::AcceptValidAccountLockoutConfig [GOOD] >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] >> HttpProxy::TlsRunning [GOOD] >> HttpProxy::TooLongURL >> integrations_test.py::test_read_jtest_results[o/OK] [GOOD] >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] |88.5%| [TA] $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> HttpProxy::TooLongURL [GOOD] >> HttpProxy::TooLongHeader >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/io_formats/arrow/scheme/ut/unittest >> FormatCSV::Nulls [GOOD] Test command err: 12000000 Cannot read CSV: no columns specified Cannot read CSV: Invalid: Empty CSV file d'Artagnan '"' Jeanne d'Arc "'" 'd'Artagnan' ''"'' 'Jeanne d'Arc' '"'"' d'Artagnan '"' Jeanne d'Arc "'" src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: \N,"","" \N,"\N","\N" \N,\N,\N parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,\N,\N ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: NULL,"","" NULL,"NULL","NULL" NULL,NULL,NULL parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,NULL,NULL ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ >> HttpProxy::TooLongHeader [GOOD] >> HttpProxy::HeaderWithoutAColon >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] >> integrations_test.py::test_read_jtest_with_one_result |88.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/oidc_proxy/ut/unittest >> Mvp::GetAddressWithoutPort [GOOD] Test command err: 2026-02-19T08:04:04.306580Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.306987Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:04.320517Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.320749Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:04.330057Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.330397Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2026-02-19T08:04:04.342639Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.343077Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2026-02-19T08:04:04.351214Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.351537Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2026-02-19T08:04:04.360126Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.360429Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2026-02-19T08:04:04.387879Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.387997Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.388251Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 400 2026-02-19T08:04:04.388308Z :MVP DEBUG: oidc_protected_page.cpp:143: Try to send request to HTTPS port 2026-02-19T08:04:04.388345Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.388592Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:04.393971Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.394035Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.394242Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 400 2026-02-19T08:04:04.426995Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.427120Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.427436Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 307 2026-02-19T08:04:04.433642Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.433741Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.434030Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2026-02-19T08:04:04.440226Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.440319Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.440621Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2026-02-19T08:04:04.446642Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.446727Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user 
request bypass OIDC 2026-02-19T08:04:04.447017Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2026-02-19T08:04:04.453371Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.453493Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.453819Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2026-02-19T08:04:04.483935Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 2026-02-19T08:04:04.484544Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2026-02-19T08:04:04.484609Z :MVP DEBUG: oidc_protected_page_nebius.cpp:96: Exchange session token 2026-02-19T08:04:04.484864Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 200 OK 2026-02-19T08:04:04.484945Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.485177Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:04.517141Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 401 2026-02-19T08:04:04.546450Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2026-02-19T08:04:04.547184Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.547687Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2026-02-19T08:04:04.552550Z :MVP DEBUG: oidc_session_create_yandex.cpp:69: SessionService.Create(): OK 2026-02-19T08:04:04.558047Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.558134Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.558457Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:04.594871Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2026-02-19T08:04:04.595598Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.595952Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2026-02-19T08:04:04.600280Z :MVP DEBUG: oidc_session_create_yandex.cpp:69: SessionService.Create(): OK 2026-02-19T08:04:04.604703Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2026-02-19T08:04:04.604765Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.605040Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:04.629807Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.629974Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2026-02-19T08:04:04.654415Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.654578Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2026-02-19T08:04:04.670919Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.671434Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2026-02-19T08:04:04.676395Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 401 2026-02-19T08:04:04.700685Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.701086Z :MVP 
DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2026-02-19T08:04:04.705965Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 400 2026-02-19T08:04:04.730121Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.730703Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2026-02-19T08:04:04.736352Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 400 2026-02-19T08:04:04.764937Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.765411Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2026-02-19T08:04:04.771291Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 412 2026-02-19T08:04:04.802951Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2026-02-19T08:04:04.808918Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2026-02-19T08:04:04.813959Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2026-02-19T08:04:04.831922Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.832391Z :MVP DEBUG: extension.cpp:14: Can not process request to protected resource: GET /ydb.viewer.page/counters HTTP/1.1 Host: oidcproxy.net Authorization: 2026-02-19T08:04:04.847233Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.847389Z :MVP DEBUG: oidc_session_create.cpp:43: Restore oidc context failed: Cannot find cookie ydb_oidc_cookie 2026-02-19T08:04:04.873346Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2026-02-19T08:04:04.873506Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2026-02-19T08:04:04.900977Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:23: Start impersonation process 2026-02-19T08:04:04.901097Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2026-02-19T08:04:04.901161Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:49: Request impersonated token 2026-02-19T08:04:04.901403Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:100: Incoming response from authorization server: 200 2026-02-19T08:04:04.901501Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:89: Set impersonated cookie: (__Host_impersonated_cookie_636C69656E745F6964: aW1w****bg== (B126DD61)) 2026-02-19T08:04:04.936384Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:23: Start impersonation process 2026-02-19T08:04:04.936503Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2026-02-19T08:04:04.961245Z :MVP DEBUG: oidc_cleanup_page.cpp:20: Clear cookie: (__Host_impersonated_cookie_636C69656E745F6964) 2026-02-19T08:04:04.986041Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 2026-02-19T08:04:04.986113Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2026-02-19T08:04:04.986152Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2026-02-19T08:04:04.986177Z :MVP DEBUG: oidc_protected_page_nebius.cpp:107: Exchange impersonated token 2026-02-19T08:04:04.986492Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 200 OK 2026-02-19T08:04:04.986577Z :MVP DEBUG: 
oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:04.986803Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:05.006660Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 2026-02-19T08:04:05.006727Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2026-02-19T08:04:05.006779Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2026-02-19T08:04:05.006810Z :MVP DEBUG: oidc_protected_page_nebius.cpp:107: Exchange impersonated token 2026-02-19T08:04:05.007104Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 401 OK 2026-02-19T08:04:05.007137Z :MVP DEBUG: oidc_protected_page_nebius.cpp:65: Getting access token: {"error": "bad_token"} 2026-02-19T08:04:05.007172Z :MVP DEBUG: oidc_protected_page_nebius.cpp:121: Clear impersonated cookie (__Host_impersonated_cookie_636C69656E745F6964) and retry 2026-02-19T08:04:05.033281Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.033642Z :MVP DEBUG: oidc_protected_page.cpp:51: Incoming incomplete response for protected resource: 200 2026-02-19T08:04:05.033782Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2026-02-19T08:04:05.033883Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2026-02-19T08:04:05.033967Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 14 bytes 2026-02-19T08:04:05.034027Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 0 bytes 2026-02-19T08:04:05.053933Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.054310Z :MVP DEBUG: oidc_protected_page.cpp:51: Incoming incomplete response for protected resource: 200 2026-02-19T08:04:05.054443Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2026-02-19T08:04:05.054521Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2026-02-19T08:04:05.054584Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 14 bytes 2026-02-19T08:04:05.054684Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 0 bytes 2026-02-19T08:04:05.085644Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.089431Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:05.092723Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2026-02-19T08:04:05.117312Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.120416Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2026-02-19T08:04:05.122889Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2026-02-19T08:04:05.145424Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.148498Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 TProfileServiceMock Get: Invalid or missing token: Bearer bad-token 2026-02-19T08:04:05.150977Z :MVP DEBUG: extension_whoami.cpp:40: Whoami Extension Info 401: Invalid or missing token, 2026-02-19T08:04:05.178199Z :MVP DEBUG: 
oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.181107Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 2026-02-19T08:04:05.183852Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2026-02-19T08:04:05.205691Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.208581Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 2026-02-19T08:04:05.210737Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2026-02-19T08:04:05.232326Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.236202Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 TProfileServiceMock Get: Invalid or missing token: Bearer bad-token 2026-02-19T08:04:05.238137Z :MVP DEBUG: extension_whoami.cpp:40: Whoami Extension Info 401: Invalid or missing token, 2026-02-19T08:04:05.259981Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.263463Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 307 2026-02-19T08:04:05.296726Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2026-02-19T08:04:05.296983Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 >> HttpProxy::HeaderWithoutAColon [GOOD] >> HttpProxy::TooManyRequests >> integrations_test.py::test_read_jtest_with_one_result [GOOD] >> ActionParsingTest::ToAndFromStringAreConsistent [GOOD] >> HttpProxy::TooManyRequests [GOOD] >> HttpProxy::ChunkedResponse1 [GOOD] >> HttpProxy::ChunkedResponse2 >> ActionParsingTest::ActionsForQueueTest [GOOD] >> ActionParsingTest::BatchActionTest [GOOD] >> ActionParsingTest::ActionsForMessageTest [GOOD] >> ActionParsingTest::FastActionsTest [GOOD] >> HttpCountersTest::CountersAggregationTest [GOOD] >> LazyCounterTest::LazyCounterTest [GOOD] >> LazyCounterTest::AggregationLazyTest [GOOD] >> LazyCounterTest::AggregationNonLazyTest [GOOD] >> LazyCounterTest::HistogramAggregationTest [GOOD] >> MessageAttributeValidationTest::MessageAttributeValidationTest [GOOD] >> MessageBodyValidationTest::MessageBodyValidationTest [GOOD] >> MeteringCountersTest::CountersAggregationTest [GOOD] >> NameValidationTest::NameValidationTest [GOOD] >> QueueAttributes::BasicStdTest [GOOD] >> QueueAttributes::BasicFifoTest [GOOD] >> QueueAttributes::BasicClampTest [GOOD] >> QueueCountersTest::InsertCountersTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithFolderTest [GOOD] >> HttpProxy::ChunkedResponse2 [GOOD] >> HttpProxy::ChunkedResponse3 >> QueueCountersTest::CountersAggregationTest [GOOD] >> QueueCountersTest::CountersAggregationCloudTest [GOOD] >> RedrivePolicy::RedrivePolicyValidationTest [GOOD] >> RedrivePolicy::RedrivePolicyToJsonTest [GOOD] >> RedrivePolicy::RedrivePolicyArnValidationTest [GOOD] >> SecureProtobufPrinterTest::MessageBody [GOOD] >> SecureProtobufPrinterTest::Tokens [GOOD] >> StringValidationTest::IsAlphaNumAndPunctuationTest [GOOD] >> UserCountersTest::DisableCountersTest [GOOD] >> UserCountersTest::RemoveUserCountersTest [GOOD] >> UserCountersTest::CountersAggregationTest [GOOD] >> HttpProxy::ChunkedResponse3 [GOOD] >> HttpProxy::StreamingResponse1 
|88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/auth_config_validator_ut/unittest >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/partition/ut/gtest >> TDeduplicatorTest::AddManyMessages_SameTime_DifferentBucket [GOOD] Test command err: Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000002 |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] >> ClosedIntervalSet::Union >> HttpProxy::StreamingResponse1 [GOOD] >> HttpProxy::StreamingResponse2 >> HttpProxy::StreamingResponse2 [GOOD] >> HttpProxy::StreamingResponse3 >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] >> TestS3UrlEscape::EscapeUnescapeForceRet [GOOD] >> TestUrlBuilder::BasicWithAdditionalEncoding [GOOD] >> TestUrlBuilder::Basic [GOOD] >> TestS3UrlEscape::EscapeEscapedForce [GOOD] >> TestUrlBuilder::BasicWithEncoding [GOOD] >> TestUrlBuilder::UriOnly [GOOD] >> TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] |88.5%| [TS] {RESULT} ydb/core/io_formats/arrow/scheme/ut/unittest >> HttpProxy::StreamingResponse3 [GOOD] >> HttpProxy::StreamingFatResponse1 [GOOD] >> HttpProxy::StreamingCompressedFatResponse1 >> TArrowPushDown::MatchSeveralRowGroups [GOOD] >> TArrowPushDown::FilterEverything [GOOD] >> TArrowPushDown::SimplePushDown [GOOD] >> MetaCache::BasicForwarding >> PushdownTest::NoFilter >> MetaCache::BasicForwarding [GOOD] >> HttpProxy::StreamingCompressedFatResponse1 [GOOD] >> HttpProxy::StreamingCompressedFatRandomResponse1 >> MetaCache::TimeoutFallback [GOOD] >> PushdownTest::NoFilter [GOOD] >> PushdownTest::Equal [GOOD] |88.5%| [TS] {RESULT} ydb/core/blobstorage/vdisk/defrag/ut/unittest |88.5%| [TS] {RESULT} ydb/core/config/validation/auth_config_validator_ut/unittest >> PushdownTest::NotEqualInt32Int64 [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: /home/runner/.ya/build/build_root/ynks/000502/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/ynks/000502/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) >> HttpProxy::StreamingCompressedFatRandomResponse1 [GOOD] >> HttpProxy::StreamingResponseWithProgress1 >> PushdownTest::TrueCoalesce [GOOD] >> PushdownTest::CmpInt16AndInt32 [GOOD] >> HttpProxy::StreamingResponseWithProgress1 [GOOD] >> HttpProxy::RequestAfter307 >> PushdownTest::PartialAnd [GOOD] >> Masking::Format [GOOD] >> Mvp::TokenatorGetMetadataTokenGood [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/base/ut/unittest >> UserCountersTest::CountersAggregationTest [GOOD] |88.5%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] >> HttpProxy::RequestAfter307 [GOOD] >> THttpProxyWithMTls::ValidClientCertificate >> PushdownTest::PartialAndOneBranchPushdownable [GOOD] |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/actors/ut/unittest >> TArrowPushDown::SimplePushDown [GOOD] |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/common/ut/unittest >> TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/meta/ut/unittest >> MetaCache::TimeoutFallback [GOOD] Test command err: 2026-02-19T08:04:06.709693Z :HTTP INFO: http_proxy_acceptor.cpp:90: Listening on http://[::]:6700 2026-02-19T08:04:06.710224Z :HTTP INFO: http_proxy_acceptor.cpp:90: Listening on http://[::]:19226 2026-02-19T08:04:06.710679Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [1:14:2061] 2026-02-19T08:04:06.710721Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:6700 2026-02-19T08:04:06.710917Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#11,127.0.0.1:6700) connecting... 2026-02-19T08:04:06.711110Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#11,127.0.0.1:6700) outgoing connection opened 2026-02-19T08:04:06.711231Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#11,127.0.0.1:6700) <- (GET /server) 2026-02-19T08:04:06.711466Z :HTTP DEBUG: http_proxy_incoming.cpp:105: (#12,[::ffff:127.0.0.1]:53000) incoming connection opened 2026-02-19T08:04:06.711641Z :HTTP DEBUG: http_proxy_incoming.cpp:197: (#12,[::ffff:127.0.0.1]:53000) -> (GET /server) 2026-02-19T08:04:06.711801Z :HTTP DEBUG: meta_cache.cpp:231: Updating ownership http://127.0.0.1:19226 with deadline 2026-02-19T08:05:06.711771Z 2026-02-19T08:04:06.711839Z :HTTP DEBUG: meta_cache.cpp:237: SetRefreshTime "/server" to 2026-02-19T08:05:06.711771Z (+1771488306.711771s) 2026-02-19T08:04:06.711886Z :HTTP DEBUG: meta_cache.cpp:198: IncomingForward /server to http://127.0.0.1:19226 timeout 30.000000s 2026-02-19T08:04:06.712121Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [1:16:2063] 2026-02-19T08:04:06.712151Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:19226 2026-02-19T08:04:06.712233Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#13,127.0.0.1:19226) connecting... 
2026-02-19T08:04:06.712310Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#13,127.0.0.1:19226) outgoing connection opened 2026-02-19T08:04:06.712335Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#13,127.0.0.1:19226) <- (GET /server) 2026-02-19T08:04:06.712462Z :HTTP DEBUG: http_proxy_incoming.cpp:105: (#14,[::ffff:127.0.0.1]:48542) incoming connection opened 2026-02-19T08:04:06.712539Z :HTTP DEBUG: http_proxy_incoming.cpp:197: (#14,[::ffff:127.0.0.1]:48542) -> (GET /server) 2026-02-19T08:04:06.712754Z :HTTP DEBUG: http_proxy_incoming.cpp:331: (#14,[::ffff:127.0.0.1]:48542) <- (200 Found, 6 bytes) 2026-02-19T08:04:06.712843Z :HTTP DEBUG: http_proxy_incoming.cpp:392: (#14,[::ffff:127.0.0.1]:48542) connection closed 2026-02-19T08:04:06.713028Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#13,127.0.0.1:19226) -> (200 Found, 6 bytes) 2026-02-19T08:04:06.713116Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#13,127.0.0.1:19226) connection closed 2026-02-19T08:04:06.713257Z :HTTP DEBUG: meta_cache.cpp:146: Cache received successfull (200) response for /server 2026-02-19T08:04:06.713454Z :HTTP DEBUG: http_proxy_incoming.cpp:331: (#12,[::ffff:127.0.0.1]:53000) <- (200 Found, 6 bytes) 2026-02-19T08:04:06.713517Z :HTTP DEBUG: http_proxy_incoming.cpp:392: (#12,[::ffff:127.0.0.1]:53000) connection closed 2026-02-19T08:04:06.713648Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#11,127.0.0.1:6700) -> (200 Found, 6 bytes) 2026-02-19T08:04:06.713780Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#11,127.0.0.1:6700) connection closed 2026-02-19T08:04:06.713865Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [1:16:2063] 2026-02-19T08:04:06.714000Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [1:14:2061] 2026-02-19T08:04:06.748416Z :HTTP INFO: http_proxy_acceptor.cpp:90: Listening on http://[::]:18252 2026-02-19T08:04:06.748747Z :HTTP INFO: http_proxy_acceptor.cpp:90: Listening on http://[::]:3876 2026-02-19T08:04:06.749091Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [2:14:2061] 2026-02-19T08:04:06.749132Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:18252 2026-02-19T08:04:06.749271Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#11,127.0.0.1:18252) connecting... 2026-02-19T08:04:06.749387Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#11,127.0.0.1:18252) outgoing connection opened 2026-02-19T08:04:06.749452Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#11,127.0.0.1:18252) <- (GET /server) 2026-02-19T08:04:06.749698Z :HTTP DEBUG: http_proxy_incoming.cpp:105: (#12,[::ffff:127.0.0.1]:53508) incoming connection opened 2026-02-19T08:04:06.749819Z :HTTP DEBUG: http_proxy_incoming.cpp:197: (#12,[::ffff:127.0.0.1]:53508) -> (GET /server) 2026-02-19T08:04:06.749938Z :HTTP DEBUG: meta_cache.cpp:231: Updating ownership http://127.0.0.1:3876 with deadline 2026-02-19T08:14:06.749914Z 2026-02-19T08:04:06.749973Z :HTTP DEBUG: meta_cache.cpp:237: SetRefreshTime "/server" to 2026-02-19T08:14:06.749914Z (+1771488846.749914s) 2026-02-19T08:04:06.750009Z :HTTP DEBUG: meta_cache.cpp:198: IncomingForward /server to http://127.0.0.1:3876 timeout 30.000000s 2026-02-19T08:04:06.750160Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [2:16:2063] 2026-02-19T08:04:06.750189Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:3876 2026-02-19T08:04:06.750277Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#13,127.0.0.1:3876) connecting... 
2026-02-19T08:04:06.750355Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#13,127.0.0.1:3876) outgoing connection opened 2026-02-19T08:04:06.750392Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#13,127.0.0.1:3876) <- (GET /server) 2026-02-19T08:04:06.750507Z :HTTP DEBUG: http_proxy_incoming.cpp:105: (#14,[::ffff:127.0.0.1]:35052) incoming connection opened 2026-02-19T08:04:06.750570Z :HTTP DEBUG: http_proxy_incoming.cpp:197: (#14,[::ffff:127.0.0.1]:35052) -> (GET /server) 2026-02-19T08:04:06.760885Z :HTTP ERROR: http_proxy_outgoing.cpp:124: (#13,127.0.0.1:3876) connection closed with error: Connection timed out 2026-02-19T08:04:06.761155Z :HTTP DEBUG: http_proxy_incoming.cpp:235: (#14,[::ffff:127.0.0.1]:35052) connection closed 2026-02-19T08:04:06.761550Z :HTTP WARN: meta_cache.cpp:151: Cache received failed response with error "Connection timed out" for /server - retrying locally 2026-02-19T08:04:06.761649Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [2:16:2063] 2026-02-19T08:04:06.772186Z :HTTP DEBUG: http_proxy_incoming.cpp:331: (#12,[::ffff:127.0.0.1]:53508) <- (200 Found, 6 bytes) 2026-02-19T08:04:06.772339Z :HTTP DEBUG: http_proxy_incoming.cpp:392: (#12,[::ffff:127.0.0.1]:53508) connection closed 2026-02-19T08:04:06.772583Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#11,127.0.0.1:18252) -> (200 Found, 6 bytes) 2026-02-19T08:04:06.772626Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#11,127.0.0.1:18252) connection closed 2026-02-19T08:04:06.772807Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [2:14:2061] >> PushdownTest::NotNull [GOOD] >> TBsVDiskOutOfSpace::WriteUntilYellowZone [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB >> PushdownTest::NotNullForDatetime [GOOD] >> PushdownTest::IsNull |88.5%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test >> PushdownTest::IsNull [GOOD] >> THttpProxyWithMTls::ValidClientCertificate [GOOD] >> THttpProxyWithMTls::UntrustedClientCertificate >> PushdownTest::StringFieldsNotSupported [GOOD] >> PushdownTest::StringFieldsNotSupported2 [GOOD] |88.5%| [TS] {RESULT} ydb/library/yql/providers/s3/actors/ut/unittest >> PushdownTest::RegexpPushdown [GOOD] |88.5%| [TS] {RESULT} ydb/core/persqueue/pqtablet/partition/ut/gtest |88.5%| [TS] {RESULT} ydb/mvp/meta/ut/unittest |88.5%| [TS] {RESULT} ydb/library/yql/providers/s3/common/ut/unittest >> PushdownTest::DecimalPushdownPrecision10Scale0 [GOOD] >> TCollectingS3ListingStrategyTests::IfNoIssuesOccursShouldReturnCollectedPaths [GOOD] >> TCollectingS3ListingStrategyTests::IfThereAreMoreRecordsThanSpecifiedByLimitShouldReturnError [GOOD] >> TCollectingS3ListingStrategyTests::IfAnyIterationReturnIssueThanWholeStrategyShouldReturnIt [GOOD] >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] >> PushdownTest::DecimalPushdownPrecesion4Scale2 [GOOD] |88.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |88.5%| [TS] {RESULT} ydb/mvp/oidc_proxy/ut/unittest |88.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/provider/ut/unittest >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] >> THttpProxyWithMTls::UntrustedClientCertificate [GOOD] >> THttpProxyWithMTls::NoClientCertificate >> TBsVDiskRange::RangeGetFromEmptyDB [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh |88.5%| [TS] {RESULT} 
ydb/library/yql/providers/s3/provider/ut/unittest |88.5%| [TS] {RESULT} ydb/core/ymq/base/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> PushdownTest::DecimalPushdownPrecesion4Scale2 [GOOD] Test command err: Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (Bool '"true") $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) discovered cluster name: test_clusterDescribe table for: ``test_cluster`.`test_table``Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (Bool '"true")) (let $2 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) $1))) (let $3 (DataSink '"result")) (let $4 (ResWrite! (Left! $2) $3 (Key) (FlatMap (Right! $2) (lambda '($6) (OptionalIf $1 $6))) '('('type)))) (return (Commit! $4 $3)) ) OptionalIf over Bool 'trueExpr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! 
world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) FlatMap with JustExpr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) PhysicalOptimizer-TrimReadWorldExpr: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) ResPullExpr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! 
$2 $1)) ) Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42BuildGenericDqSourceSettingsBuilt settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_decimal_precision10_scale0" '"col_decimal_precision4_scale2" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($34) (Bool '"true")))) (let $4 (DataType 'Bool)) (let $5 (DataType 'Date)) (let $6 (DataType 'Datetime)) (let $7 (DataType 'Decimal '10 '0)) (let $8 (DataType 'Decimal '4 '2)) (let $9 (DataType 'Double)) (let $10 (DataType 'DyNumber)) (let $11 (DataType 'Float)) (let $12 (DataType 'Int16)) (let $13 (DataType 'Int32)) (let $14 (DataType 'Int64)) (let $15 (DataType 'Int8)) (let $16 (DataType 'Interval)) (let $17 (DataType 'Json)) (let $18 (DataType 'JsonDocument)) (let $19 (DataType 'String)) (let $20 (DataType 'Timestamp)) (let $21 (DataType 'TzDate)) (let $22 (DataType 'TzDatetime)) (let $23 (DataType 'TzTimestamp)) (let $24 (DataType 'Uint16)) (let $25 (DataType 'Uint32)) (let $26 (DataType 'Uint64)) (let $27 (DataType 'Uint8)) (let $28 (DataType 'Utf8)) (let $29 (DataType 'Uuid)) (let $30 (DataType 'Yson)) (let $31 (StructType '('"col_bool" $4) '('"col_date" $5) '('"col_datetime" $6) '('"col_decimal_precision10_scale0" $7) '('"col_decimal_precision4_scale2" $8) '('"col_double" $9) '('"col_dynumber" $10) '('"col_float" $11) '('"col_int16" $12) '('"col_int32" $13) '('"col_int64" $14) '('"col_int8" $15) '('"col_interval" $16) '('"col_json" $17) '('"col_json_document" $18) '('"col_optional_bool" (OptionalType $4)) '('"col_optional_date" (OptionalType $5)) '('"col_optional_datetime" (OptionalType $6)) '('"col_optional_double" (OptionalType $9)) '('"col_optional_dynumber" (OptionalType $10)) '('"col_optional_float" (OptionalType $11)) '('"col_optional_int16" (OptionalType $12)) '('"col_optional_int32" (OptionalType $13)) '('"col_optional_int64" (OptionalType $14)) '('"col_optional_int8" (OptionalType $15)) '('"col_optional_interval" (OptionalType $16)) '('"col_optional_json" (OptionalType $17)) '('"col_optional_json_document" (OptionalType $18)) '('"col_optional_string" (OptionalType $19)) '('"col_optional_timestamp" (OptionalType $20)) '('"col_optional_tz_date" (OptionalType $21)) '('"col_optional_tz_datetime" (OptionalType $22)) '('"col_optional_tz_timestamp" (OptionalType $23)) '('"col_optional_uint16" (OptionalType $24)) '('"col_optional_uint32" (OptionalType $25)) '('"col_optional_uint64" (OptionalType $26)) '('"col_optional_uint8" (OptionalType $27)) '('"col_optional_utf8" 
(OptionalType $28)) '('"col_optional_uuid" (OptionalType $29)) '('"col_optional_yson" (OptionalType $30)) '('"col_string" $19) '('"col_timestamp" $20) '('"col_tz_date" $21) '('"col_tz_datetime" $22) '('"col_tz_timestamp" $23) '('"col_uint16" $24) '('"col_uint32" $25) '('"col_uint64" $26) '('"col_uint8" $27) '('"col_utf8" $28) '('"col_uuid" $29) '('"col_yson" $30))) (let $32 (DqSourceWrap $3 (DataSource '"generic" '"test_cluster") $31)) (let $33 (ResPull! world $1 (Key) $32 '('('type)) '"generic")) (return (Commit! $33 $1)) ) Dq source filter settings: GenericConfiguration::AddCluster: name = test_cluster, kind = POSTGRESQL, database name = database, database id = , endpoint = { host: "host" port: 42 }, use tls = 0, protocol = NATIVE Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (== (Member $row '"col_int16") (Int16 '42)) $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (== (Member $4 '"col_int16") (Int16 '42)) $4))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (== (Member $4 '"col_int16") (Int16 '42)) $4))) '('('type)))) (return (Commit! $3 $2)) ) discovered cluster name: test_clusterDescribe table for: ``test_cluster`.`test_table``Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int16") (Int16 '42)) $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int16") (Int16 '42)) $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int ... 
) '('"col_optional_datetime" (OptionalType $8)) '('"col_optional_double" (OptionalType $11)) '('"col_optional_dynumber" (OptionalType $12)) '('"col_optional_float" (OptionalType $13)) '('"col_optional_int16" (OptionalType $14)) '('"col_optional_int32" (OptionalType $15)) '('"col_optional_int64" (OptionalType $16)) '('"col_optional_int8" (OptionalType $17)) '('"col_optional_interval" (OptionalType $18)) '('"col_optional_json" (OptionalType $19)) '('"col_optional_json_document" (OptionalType $20)) '('"col_optional_string" (OptionalType $21)) '('"col_optional_timestamp" (OptionalType $22)) '('"col_optional_tz_date" (OptionalType $23)) '('"col_optional_tz_datetime" (OptionalType $24)) '('"col_optional_tz_timestamp" (OptionalType $25)) '('"col_optional_uint16" (OptionalType $26)) '('"col_optional_uint32" (OptionalType $27)) '('"col_optional_uint64" (OptionalType $28)) '('"col_optional_uint8" (OptionalType $29)) '('"col_optional_utf8" (OptionalType $30)) '('"col_optional_uuid" (OptionalType $31)) '('"col_optional_yson" (OptionalType $32)) '('"col_string" $21) '('"col_timestamp" $22) '('"col_tz_date" $23) '('"col_tz_datetime" $24) '('"col_tz_timestamp" $25) '('"col_uint16" $26) '('"col_uint32" $27) '('"col_uint64" $28) '('"col_uint8" $29) '('"col_utf8" $30) '('"col_uuid" $31) '('"col_yson" $32))) (let $34 (DqSourceWrap $5 (DataSource '"generic" '"test_cluster") $33)) (let $35 (ResWrite! world $1 (Key) (FlatMap $34 (lambda '($37) (OptionalIf (== (Member $37 $3) $4) $37))) '('('type)))) (return (Commit! $35 $1)) ) Dq source filter settings: filter_typed { comparison { operation: EQ left_value { column: "col_decimal_precision10_scale0" } right_value { typed_value { type { decimal_type { precision: 10 } } value { bytes_value: "\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" } } } } } GenericConfiguration::AddCluster: name = test_cluster, kind = POSTGRESQL, database name = database, database id = , endpoint = { host: "host" port: 42 }, use tls = 0, protocol = NATIVE Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (== (Member $row '"col_decimal_precision4_scale2") (Decimal '"-22.22" '"4" '"2") ) $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (block '( (let $5 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $4 '"col_decimal_precision4_scale2") $5) $4)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (block '( (let $5 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $4 '"col_decimal_precision4_scale2") $5) $4)) )))) '('('type)))) (return (Commit! 
$3 $2)) ) discovered cluster name: test_clusterDescribe table for: ``test_cluster`.`test_table``Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $5 '"col_decimal_precision4_scale2") $6) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $5 '"col_decimal_precision4_scale2") $6) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $5 '"col_decimal_precision4_scale2") $6) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $5 '"col_decimal_precision4_scale2") $6) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) PhysicalOptimizer-TrimReadWorldPush filter lambda: ( (return (lambda '($1) (block '( (let $2 (Decimal '"-22.22" '"4" '"2")) (return (== (Member $1 '"col_decimal_precision4_scale2") $2)) )))) ) PhysicalOptimizer-PushFilterToReadTableExpr: ( (let $1 (DataSink '"result")) (let $2 '"col_decimal_precision4_scale2") (let $3 (Decimal '"-22.22" '"4" '"2")) (let $4 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) (== (Member $5 $2) $3)))) (lambda '($6) (OptionalIf (== (Member $6 $2) $3) $6))) '('('type)))) (return (Commit! $4 $1)) ) Expr: ( (let $1 (DataSink '"result")) (let $2 '"col_decimal_precision4_scale2") (let $3 (Decimal '"-22.22" '"4" '"2")) (let $4 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) (== (Member $5 $2) $3)))) (lambda '($6) (OptionalIf (== (Member $6 $2) $3) $6))) '('('type)))) (return (Commit! $4 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 '"col_decimal_precision4_scale2") (let $3 (Decimal '"-22.22" '"4" '"2")) (let $4 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) (== (Member $5 $2) $3)))) (lambda '($6) (OptionalIf (== (Member $6 $2) $3) $6))) '('('type)))) (return (Commit! $4 $1)) ) Push filter. Lambda is already not emptyOptimized expr: ( (let $1 (DataSink '"result")) (let $2 '"col_decimal_precision4_scale2") (let $3 (Decimal '"-22.22" '"4" '"2")) (let $4 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! 
world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) (== (Member $5 $2) $3)))) (lambda '($6) (OptionalIf (== (Member $6 $2) $3) $6))) '('('type)))) (return (Commit! $4 $1)) ) Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42BuildGenericDqSourceSettingsBuilt settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_decimal_precision10_scale0" '"col_decimal_precision4_scale2" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 '"col_decimal_precision4_scale2") (let $4 (Decimal '"-22.22" '"4" '"2")) (let $5 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($36) (== (Member $36 $3) $4)))) (let $6 (DataType 'Bool)) (let $7 (DataType 'Date)) (let $8 (DataType 'Datetime)) (let $9 (DataType 'Decimal '10 '0)) (let $10 (DataType 'Decimal '4 '2)) (let $11 (DataType 'Double)) (let $12 (DataType 'DyNumber)) (let $13 (DataType 'Float)) (let $14 (DataType 'Int16)) (let $15 (DataType 'Int32)) (let $16 (DataType 'Int64)) (let $17 (DataType 'Int8)) (let $18 (DataType 'Interval)) (let $19 (DataType 'Json)) (let $20 (DataType 'JsonDocument)) (let $21 (DataType 'String)) (let $22 (DataType 'Timestamp)) (let $23 (DataType 'TzDate)) (let $24 (DataType 'TzDatetime)) (let $25 (DataType 'TzTimestamp)) (let $26 (DataType 'Uint16)) (let $27 (DataType 'Uint32)) (let $28 (DataType 'Uint64)) (let $29 (DataType 'Uint8)) (let $30 (DataType 'Utf8)) (let $31 (DataType 'Uuid)) (let $32 (DataType 'Yson)) (let $33 (StructType '('"col_bool" $6) '('"col_date" $7) '('"col_datetime" $8) '('"col_decimal_precision10_scale0" $9) '('"col_decimal_precision4_scale2" $10) '('"col_double" $11) '('"col_dynumber" $12) '('"col_float" $13) '('"col_int16" $14) '('"col_int32" $15) '('"col_int64" $16) '('"col_int8" $17) '('"col_interval" $18) '('"col_json" $19) '('"col_json_document" $20) '('"col_optional_bool" (OptionalType $6)) '('"col_optional_date" (OptionalType $7)) '('"col_optional_datetime" (OptionalType $8)) '('"col_optional_double" (OptionalType $11)) '('"col_optional_dynumber" (OptionalType $12)) '('"col_optional_float" (OptionalType $13)) '('"col_optional_int16" (OptionalType $14)) '('"col_optional_int32" (OptionalType $15)) '('"col_optional_int64" (OptionalType $16)) '('"col_optional_int8" (OptionalType $17)) '('"col_optional_interval" (OptionalType $18)) '('"col_optional_json" (OptionalType $19)) '('"col_optional_json_document" (OptionalType $20)) '('"col_optional_string" (OptionalType $21)) '('"col_optional_timestamp" (OptionalType $22)) '('"col_optional_tz_date" (OptionalType $23)) 
'('"col_optional_tz_datetime" (OptionalType $24)) '('"col_optional_tz_timestamp" (OptionalType $25)) '('"col_optional_uint16" (OptionalType $26)) '('"col_optional_uint32" (OptionalType $27)) '('"col_optional_uint64" (OptionalType $28)) '('"col_optional_uint8" (OptionalType $29)) '('"col_optional_utf8" (OptionalType $30)) '('"col_optional_uuid" (OptionalType $31)) '('"col_optional_yson" (OptionalType $32)) '('"col_string" $21) '('"col_timestamp" $22) '('"col_tz_date" $23) '('"col_tz_datetime" $24) '('"col_tz_timestamp" $25) '('"col_uint16" $26) '('"col_uint32" $27) '('"col_uint64" $28) '('"col_uint8" $29) '('"col_utf8" $30) '('"col_uuid" $31) '('"col_yson" $32))) (let $34 (DqSourceWrap $5 (DataSource '"generic" '"test_cluster") $33)) (let $35 (ResWrite! world $1 (Key) (FlatMap $34 (lambda '($37) (OptionalIf (== (Member $37 $3) $4) $37))) '('('type)))) (return (Commit! $35 $1)) ) Dq source filter settings: filter_typed { comparison { operation: EQ left_value { column: "col_decimal_precision4_scale2" } right_value { typed_value { type { decimal_type { precision: 4 scale: 2 } } value { bytes_value: "R\367\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } } } } } |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_large/unittest >> THttpProxyWithMTls::NoClientCertificate [GOOD] >> THttpProxyWithMTls::NotSecureConnection |88.5%| [TS] {RESULT} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest |88.5%| [TM] {RESULT} ydb/core/tablet_flat/ut_large/unittest >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] >> THttpProxyWithMTls::NotSecureConnection [GOOD] |88.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |88.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> GroupStress::Test [GOOD] |88.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |88.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |88.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |88.5%| [TA] {RESULT} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ClosedIntervalSet::Union [GOOD] >> ClosedIntervalSet::Difference |88.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a ------- [TS] {asan, default-linux-x86_64, release} ydb/library/actors/http/ut/unittest >> THttpProxyWithMTls::NotSecureConnection [GOOD] Test command err: (SSL_ERROR): 1 error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed >> ArrowInferenceTest::csv_simple [GOOD] >> ArrowInferenceTest::tsv_simple [GOOD] >> ArrowInferenceTest::tsv_empty [GOOD] >> ArrowInferenceTest::broken_json [GOOD] >> ArrowInferenceTest::empty_json_each_row [GOOD] >> ArrowInferenceTest::empty_json_list [GOOD] >> ArrowInferenceTest::broken_json_list [GOOD] |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_group/unittest >> GroupStress::Test [GOOD] |88.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |88.5%| [TM] {RESULT} ydb/core/blobstorage/ut_group/unittest |88.5%| [TS] {RESULT} ydb/library/actors/http/ut/unittest |88.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction ------- [TS] {asan, default-linux-x86_64, release} ydb/core/external_sources/object_storage/inference/ut/gtest >> ArrowInferenceTest::broken_json_list [GOOD] Test command err: {
: Error: couldn't open csv/tsv file, check format and compression parameters: empty file, code: 1001 } {
: Error: couldn't open json file, check format and compression parameters: empty file, code: 1001 } {
: Error: couldn't open json file, check format and compression parameters: empty file, code: 1001 } 2026-02-19T08:04:09.607022Z 1 00h00m00.000000s :OBJECT_STORAGE_INFERENCINATOR DEBUG: TArrowInferencinator: [1:6:6]. HandleFileError: {
: Error: couldn't run arrow json chunker for /path/is/neither/real: Invalid: straddling object straddles two block boundaries (try to increase block size?), code: 1001 } {
: Error: couldn't run arrow json chunker for /path/is/neither/real: Invalid: straddling object straddles two block boundaries (try to increase block size?), code: 1001 } {
: Error: couldn't open json file, check format and compression parameters: Invalid: JSON parse error: Invalid value. in row 0, code: 1001 } >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] |88.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] >> Dictionary::Simple [GOOD] >> Dictionary::ComparePayloadAndFull >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] >> TMvpStartupOptions::DefaultHttpPortWhenNoPorts [GOOD] >> TMvpStartupOptions::HttpsRequiresCert [GOOD] >> TMvpStartupOptions::CliOverridesYaml [GOOD] >> TMvpStartupOptions::YamlHttpsWithoutCertThrows [GOOD] >> TMvpStartupOptions::CliHttpsPortWithYamlCertSucceeds [GOOD] >> TMvpStartupOptions::SslCertWithoutPortsDefaultsHttps [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/core/ut/unittest >> TMvpStartupOptions::SslCertWithoutPortsDefaultsHttps [GOOD] Test command err: 2026-02-19T08:04:07.162107Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2026-02-19T08:04:07.162366Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token 2026-02-19T08:04:07.173103Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2026-02-19T08:04:07.173345Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token 2026-02-19T08:04:12.173577Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2026-02-19T08:04:12.173866Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token >> Dictionary::ComparePayloadAndFull [GOOD] >> Hash::ScalarBinaryHash [GOOD] >> Hash::ScalarCTypeHash [GOOD] >> Hash::ScalarCompositeHash [GOOD] >> Printer::SSAToPrettyString [GOOD] >> ProgramStep::Round0 [GOOD] >> ProgramStep::Round1 [GOOD] >> ProgramStep::Filter [GOOD] >> ProgramStep::Add [GOOD] >> ProgramStep::Substract [GOOD] >> ProgramStep::Multiply [GOOD] >> ProgramStep::Divide [GOOD] >> ProgramStep::Gcd [GOOD] >> ProgramStep::Lcm [GOOD] >> ProgramStep::Mod [GOOD] >> ProgramStep::ModOrZero [GOOD] >> ProgramStep::Abs [GOOD] >> ProgramStep::Negate [GOOD] >> ProgramStep::Compares [GOOD] >> ProgramStep::Logic0 [GOOD] >> ProgramStep::Logic1 [GOOD] >> ProgramStep::StartsWith [GOOD] >> ProgramStep::EndsWith [GOOD] >> ProgramStep::MatchSubstring >> ProgramStep::MatchSubstring [GOOD] >> ProgramStep::StartsWithIgnoreCase [GOOD] >> ProgramStep::EndsWithIgnoreCase [GOOD] >> ProgramStep::MatchSubstringIgnoreCase [GOOD] >> ProgramStep::ScalarTest [GOOD] >> ProgramStep::TestValueFromNull [GOOD] >> ProgramStep::MergeFilterSimple [GOOD] >> ProgramStep::Projection [GOOD] >> ProgramStep::MinMax [GOOD] >> ProgramStep::Sum [GOOD] >> ProgramStep::SumGroupBy [GOOD] >> ProgramStep::SumGroupByNotNull [GOOD] >> ProgramStep::MinMaxSomeGroupBy >> ProgramStep::MinMaxSomeGroupBy [GOOD] >> ProgramStep::MinMaxSomeGroupByNotNull [GOOD] >> Slicer::SplitBySizes [GOOD] >> SortableBatchPosition::FindPosition [GOOD] >> SortableBatchPosition::MergingSortedInputStreamReversedWithOneSearchPoint [GOOD] >> SortableBatchPosition::MergingSortedInputStreamReversedWithRangeSearch [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD] >> 
test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/formats/arrow/ut/unittest >> SortableBatchPosition::MergingSortedInputStreamReversedWithRangeSearch [GOOD] Test command err: Process: 100000d;/100000; 10000d;/10000; NO_CODEC(poolsize=1024;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=1024;keylen=10) 0.1534132783 0.2482180533 NO_CODEC(poolsize=1024;keylen=16) 0.1104676508 0.2045372848 NO_CODEC(poolsize=1024;keylen=32) 0.06592569055 0.1591802296 NO_CODEC(poolsize=1024;keylen=64) 0.03972180035 0.1324717476 NO_CODEC(poolsize=128;keylen=1) 0.2016566193 0.2164784476 NO_CODEC(poolsize=128;keylen=10) 0.07304169975 0.08752922393 NO_CODEC(poolsize=128;keylen=16) 0.05151637558 0.06514358749 NO_CODEC(poolsize=128;keylen=32) 0.02919093319 0.04189888314 NO_CODEC(poolsize=128;keylen=64) 0.01605694811 0.02821124922 NO_CODEC(poolsize=16;keylen=1) 0.2010010074 0.2099570542 NO_CODEC(poolsize=16;keylen=10) 0.0719219365 0.07635285397 NO_CODEC(poolsize=16;keylen=16) 0.05039654131 0.05396013899 NO_CODEC(poolsize=16;keylen=32) 0.02807102527 0.03070808446 NO_CODEC(poolsize=16;keylen=64) 0.01493699686 0.01701612239 NO_CODEC(poolsize=1;keylen=1) 0.2008730831 0.2086845872 NO_CODEC(poolsize=1;keylen=10) 0.07177339648 0.07487027428 NO_CODEC(poolsize=1;keylen=16) 0.0502445638 0.05244238527 NO_CODEC(poolsize=1;keylen=32) 0.02791992658 0.0291982148 NO_CODEC(poolsize=1;keylen=64) 0.01478641518 0.01551089526 NO_CODEC(poolsize=512;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=512;keylen=10) 0.1482943606 0.1971260763 NO_CODEC(poolsize=512;keylen=16) 0.1053484084 0.1534129488 NO_CODEC(poolsize=512;keylen=32) 0.0608061115 0.1080222928 NO_CODEC(poolsize=512;keylen=64) 0.03460202321 0.08129402495 NO_CODEC(poolsize=64;keylen=1) 0.2013687897 0.2136153969 NO_CODEC(poolsize=64;keylen=10) 0.07240183504 0.08114272681 NO_CODEC(poolsize=64;keylen=16) 0.05087647028 0.05875304549 NO_CODEC(poolsize=64;keylen=32) 0.02855098581 0.03550414104 NO_CODEC(poolsize=64;keylen=64) 0.01541697597 0.02181403389 lz4(poolsize=1024;keylen=1) 0.006629768257 0.05541610349 lz4(poolsize=1024;keylen=10) 0.04233951498 0.3344832994 lz4(poolsize=1024;keylen=16) 0.05657489465 0.404264214 lz4(poolsize=1024;keylen=32) 0.09037137941 0.5318074361 lz4(poolsize=1024;keylen=64) 0.01074936154 0.1063492063 lz4(poolsize=128;keylen=1) 0.003831111821 0.02881389382 lz4(poolsize=128;keylen=10) 0.00718182175 0.06087121933 lz4(poolsize=128;keylen=16) 0.008735936466 0.07523964551 lz4(poolsize=128;keylen=32) 0.01375268158 0.117441454 lz4(poolsize=128;keylen=64) 0.02262360212 0.1850289108 lz4(poolsize=16;keylen=1) 0.00273442178 0.01820340324 lz4(poolsize=16;keylen=10) 0.003078137332 0.02169239789 lz4(poolsize=16;keylen=16) 0.003266503667 0.02356577168 lz4(poolsize=16;keylen=32) 0.003742685614 0.02844311377 lz4(poolsize=16;keylen=64) 0.004937163375 0.03979647465 lz4(poolsize=1;keylen=1) 0.00251497006 0.01603325416 lz4(poolsize=1;keylen=10) 0.002531395234 0.01628089447 lz4(poolsize=1;keylen=16) 0.002515970516 0.01617933723 lz4(poolsize=1;keylen=32) 0.00251450677 0.01630226314 lz4(poolsize=1;keylen=64) 0.002511620933 0.01653353149 lz4(poolsize=512;keylen=1) 0.005362411291 0.04359726295 lz4(poolsize=512;keylen=10) 0.02347472854 0.1933066062 lz4(poolsize=512;keylen=16) 0.03056053336 0.2426853056 lz4(poolsize=512;keylen=32) 0.04856356058 0.3467897492 lz4(poolsize=512;keylen=64) 0.04102771881 0.3228658321 lz4(poolsize=64;keylen=1) 0.003312844256 0.02372010279 lz4(poolsize=64;keylen=10) 
0.004839661617 0.03863241259 lz4(poolsize=64;keylen=16) 0.005715507689 0.04687204687 lz4(poolsize=64;keylen=32) 0.007821957352 0.06669044223 lz4(poolsize=64;keylen=64) 0.01258912656 0.1073551894 zstd(poolsize=1024;keylen=1) 0.007324840764 0.0754840827 zstd(poolsize=1024;keylen=10) 0.04506846012 0.3776978417 zstd(poolsize=1024;keylen=16) 0.0655640205 0.4694540288 zstd(poolsize=1024;keylen=32) 0.1110720087 0.6098141264 zstd(poolsize=1024;keylen=64) 0.1914108287 0.7447345433 zstd(poolsize=128;keylen=1) 0.003769847609 0.04002713704 zstd(poolsize=128;keylen=10) 0.007456731695 0.07809798271 zstd(poolsize=128;keylen=16) 0.0102539786 0.1029455519 zstd(poolsize=128;keylen=32) 0.01677217062 0.1578947368 zstd(poolsize=128;keylen=64) 0.03005940945 0.2517949988 zstd(poolsize=16;keylen=1) 0.002620896858 0.02794819359 zstd(poolsize=16;keylen=10) 0.002816201441 0.03048416019 zstd(poolsize=16;keylen=16) 0.003368308096 0.03570300158 zstd(poolsize=16;keylen=32) 0.004159808469 0.0434375 zstd(poolsize=16;keylen=64) 0.005779996974 0.05875115349 zstd(poolsize=1;keylen=1) 0.002461243407 0.02626193724 zstd(poolsize=1;keylen=10) 0.002154636612 0.0234375 zstd(poolsize=1;keylen=16) 0.002356872222 0.02519132653 zstd(poolsize=1;keylen=32) 0.002427911996 0.02573879886 zstd(poolsize=1;keylen=64) 0.00258021431 0.02699269609 zstd(poolsize=512;keylen=1) 0.005583027596 0.05848930481 zstd(poolsize=512;keylen=10) 0.0236929438 0.2237078941 zstd(poolsize=512;keylen=16) 0.03443366072 0.2936507937 zstd(poolsize=512;keylen=32) 0.05917328099 0.4212765957 zstd(poolsize=512;keylen=64) 0.1058929843 0.5749553837 zstd(poolsize=64;keylen=1) 0.00319560285 0.03401360544 zstd(poolsize=64;keylen=10) 0.004852093844 0.05176470588 zstd(poolsize=64;keylen=16) 0.00633344236 0.06557881773 zstd(poolsize=64;keylen=32) 0.009647738439 0.09619952494 zstd(poolsize=64;keylen=64) 0.01626771323 0.1514644351 NO_CODEC --1000 ----1 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----16 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----64 ------1 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----128 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----512 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----1024 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% --10000 ---- ... 
4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Min):[1]}\",\"{4(Min):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, 
label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Max):[1]}\",\"{4(Max):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Some):[1]}\",\"{4(Some):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=272;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=248;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=248;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:141;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"2222"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"2222"}],"fields":["id1: int32 not null"]},"reverse":true,"position":1,"records_count":2}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"1111"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":2}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"2222"},{"name":"value","value":"7777"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"2222"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1}; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:141;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"4444"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"4444"}],"fields":["id1: int32 not null"]},"reverse":true,"position":3,"records_count":4}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"3333"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":2,"records_count":4}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"1111"},{"name":"value","value":"7777"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":2}; >> TBlobStorageHullFresh::AppendixPerf [GOOD] >> TBlobStorageHullFresh::AppendixPerf_Tune >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |88.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/txusage_fixture.cpp |88.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/txusage_fixture.cpp |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |88.6%| [TS] {RESULT} ydb/core/external_sources/object_storage/inference/ut/gtest |88.6%| [TS] {RESULT} ydb/mvp/core/ut/unittest |88.6%| [TS] {RESULT} ydb/core/formats/arrow/ut/unittest |88.6%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] >> Mirror3of4::ReplicationSmall [GOOD] >> Mirror3of4::ReplicationHuge |88.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |88.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] |88.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> XmlBuilderTest::WritesProperly [GOOD] >> XmlBuilderTest::MacroBuilder [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |88.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/http/ut/unittest >> XmlBuilderTest::MacroBuilder [GOOD] >> TBsOther1::ChaoticParallelWrite [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> IcbAsActorTests::TestHttpPostReaction |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/test_connection/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |88.6%| [TS] {RESULT} ydb/core/ymq/http/ut/unittest |88.6%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |88.6%| [TS] {RESULT} ydb/core/fq/libs/test_connection/ut/unittest |88.6%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] Test command err: 2026-02-19T08:03:40.759744Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:911:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759757Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:670:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759769Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:335:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759780Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:797:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759794Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:410:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759803Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:536:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759813Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:806:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759829Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:33:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759843Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:147:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.759854Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:173:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760416Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:279:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760434Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:0:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760447Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:563:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760462Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:569:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760476Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:372:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760498Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:82:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760515Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:656:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760527Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: 
PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:93:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760555Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:743:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.760566Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:791:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761101Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:784:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761114Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:432:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761128Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:800:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761141Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:129:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761155Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:456:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761171Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:228:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761188Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:930:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761199Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:328:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761213Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:367:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761227Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:248:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761677Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:436:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761691Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:285:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761725Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:415:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761737Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:155:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761748Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:666:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761762Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# 
[5000:1:788:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761776Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:392:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761792Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:387:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761805Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:897:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.761816Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:763:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762141Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:878:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762160Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:924:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762177Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:752:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762189Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:306:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762204Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:376:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762218Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:501:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762233Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:507:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762265Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:652:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762279Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:770:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762295Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:949:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762687Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:991:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762702Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:253:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762717Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:425:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762729Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:406:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762737Z 
:BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:990:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762744Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:928:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762754Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:581:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762762Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:313:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762772Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:413:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.762780Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:276:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.763199Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:565:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.763219Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:87:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.763229Z :BS_VDISK_PU ... ob# [5000:1:817:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.806723Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:504:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.806732Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:92:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.806924Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:716:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.806934Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:139:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.806947Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:24:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.806961Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:215:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.806971Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:573:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.806980Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:474:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807003Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:835:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807016Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:17:0:0:66560:1] Marker# BSVS08 
2026-02-19T08:03:40.807026Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:424:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807036Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:145:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807288Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:115:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807306Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:724:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807321Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:47:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807340Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:500:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807355Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:486:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807373Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:586:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807387Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:18:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807401Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:498:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807419Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:842:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807432Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:863:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807744Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:883:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807761Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:435:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807773Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:570:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807793Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:912:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807804Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:688:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807817Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:323:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807831Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: 
PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:258:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807861Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:373:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807875Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:171:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.807886Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:438:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808123Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:380:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808137Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:370:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808153Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:194:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808163Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:576:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808174Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:15:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808186Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:161:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808196Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:71:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808207Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:243:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808216Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:136:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808226Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:556:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808513Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:192:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808533Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:443:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808541Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:74:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808556Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:112:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808565Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# 
[5000:1:774:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808574Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:382:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808583Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:431:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808595Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:152:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808602Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:509:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808627Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:637:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808885Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:338:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808900Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:608:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808909Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:789:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808919Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:7:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808929Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:957:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808938Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:745:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808953Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:786:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808963Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:740:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.808998Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:769:0:0:66560:1] Marker# BSVS08 2026-02-19T08:03:40.809009Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:637: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:515:0:0:66560:1] Marker# BSVS08 |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] >> SelfHealActorTest::SingleErrorDisk >> BsControllerTest::SelfHealMirror3dc >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] >> BsControllerTest::SelfHealBlock4Plus2 >> SelfHealActorTest::SingleErrorDisk [GOOD] >> BsControllerTest::TestLocalSelfHeal >> BsControllerTest::TestLocalBrokenRelocation >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2026-02-19T08:04:25.548809Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-02-19T08:04:25.548878Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-02-19T08:04:25.548969Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-02-19T08:04:25.548997Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-02-19T08:04:25.549034Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-02-19T08:04:25.549059Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-02-19T08:04:25.549117Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-02-19T08:04:25.549152Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-02-19T08:04:25.549191Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-02-19T08:04:25.549243Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-02-19T08:04:25.549281Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-02-19T08:04:25.549305Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-02-19T08:04:25.549370Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-02-19T08:04:25.549399Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-02-19T08:04:25.549440Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-02-19T08:04:25.549470Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-02-19T08:04:25.549512Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-02-19T08:04:25.549548Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-02-19T08:04:25.549601Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-02-19T08:04:25.549626Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-02-19T08:04:25.549663Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-02-19T08:04:25.549685Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-02-19T08:04:25.549738Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-02-19T08:04:25.549762Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-02-19T08:04:25.549798Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-02-19T08:04:25.549819Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-02-19T08:04:25.549851Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-02-19T08:04:25.549872Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-02-19T08:04:25.549910Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] 
Bootstrap 2026-02-19T08:04:25.549956Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-02-19T08:04:25.568306Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:514:38] Status# ERROR ClientId# [1:514:38] ServerId# [0:0:0] PipeClient# [1:514:38] 2026-02-19T08:04:25.569201Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:515:20] Status# ERROR ClientId# [2:515:20] ServerId# [0:0:0] PipeClient# [2:515:20] 2026-02-19T08:04:25.569319Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:516:20] Status# ERROR ClientId# [3:516:20] ServerId# [0:0:0] PipeClient# [3:516:20] 2026-02-19T08:04:25.569367Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:517:20] Status# ERROR ClientId# [4:517:20] ServerId# [0:0:0] PipeClient# [4:517:20] 2026-02-19T08:04:25.569416Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:518:20] Status# ERROR ClientId# [5:518:20] ServerId# [0:0:0] PipeClient# [5:518:20] 2026-02-19T08:04:25.569464Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:519:20] Status# ERROR ClientId# [6:519:20] ServerId# [0:0:0] PipeClient# [6:519:20] 2026-02-19T08:04:25.569510Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:520:20] Status# ERROR ClientId# [7:520:20] ServerId# [0:0:0] PipeClient# [7:520:20] 2026-02-19T08:04:25.569572Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:521:20] Status# ERROR ClientId# [8:521:20] ServerId# [0:0:0] PipeClient# [8:521:20] 2026-02-19T08:04:25.569632Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:522:20] Status# ERROR ClientId# [9:522:20] ServerId# [0:0:0] PipeClient# [9:522:20] 2026-02-19T08:04:25.569679Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:523:20] Status# ERROR ClientId# [10:523:20] ServerId# [0:0:0] PipeClient# [10:523:20] 2026-02-19T08:04:25.569740Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:524:20] Status# ERROR ClientId# [11:524:20] ServerId# [0:0:0] PipeClient# [11:524:20] 2026-02-19T08:04:25.569788Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:525:20] Status# ERROR ClientId# [12:525:20] ServerId# [0:0:0] PipeClient# [12:525:20] 2026-02-19T08:04:25.569857Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:526:20] Status# ERROR ClientId# [13:526:20] ServerId# [0:0:0] PipeClient# [13:526:20] 2026-02-19T08:04:25.569897Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:527:20] Status# ERROR ClientId# [14:527:20] ServerId# [0:0:0] PipeClient# [14:527:20] 2026-02-19T08:04:25.569942Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:528:20] Status# ERROR ClientId# [15:528:20] ServerId# [0:0:0] PipeClient# [15:528:20] 2026-02-19T08:04:25.628172Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2026-02-19T08:04:25.628268Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2026-02-19T08:04:25.628311Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2026-02-19T08:04:25.628353Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2026-02-19T08:04:25.628397Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2026-02-19T08:04:25.628437Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2026-02-19T08:04:25.628508Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2026-02-19T08:04:25.628552Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2026-02-19T08:04:25.628625Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2026-02-19T08:04:25.628664Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 
2026-02-19T08:04:25.628709Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2026-02-19T08:04:25.628749Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2026-02-19T08:04:25.628792Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2026-02-19T08:04:25.628837Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2026-02-19T08:04:25.628891Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2026-02-19T08:04:25.633562Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:589:66] Status# OK ClientId# [1:589:66] ServerId# [1:618:67] PipeClient# [1:589:66] 2026-02-19T08:04:25.633632Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2026-02-19T08:04:25.641503Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:590:21] Status# OK ClientId# [2:590:21] ServerId# [1:619:68] PipeClient# [2:590:21] 2026-02-19T08:04:25.642281Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2026-02-19T08:04:25.642354Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:591:21] Status# OK ClientId# [3:591:21] ServerId# [1:620:69] PipeClient# [3:591:21] 2026-02-19T08:04:25.642383Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2026-02-19T08:04:25.642434Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:592:21] Status# OK ClientId# [4:592:21] ServerId# [1:621:70] PipeClient# [4:592:21] 2026-02-19T08:04:25.642464Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2026-02-19T08:04:25.642504Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:593:21] Status# OK ClientId# [5:593:21] ServerId# [1:622:71] PipeClient# [5:593:21] 2026-02-19T08:04:25.642529Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2026-02-19T08:04:25.642567Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:594:21] Status# OK ClientId# [6:594:21] ServerId# [1:623:72] PipeClient# [6:594:21] 2026-02-19T08:04:25.642593Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2026-02-19T08:04:25.642651Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:595:21] Status# OK ClientId# [7:595:21] ServerId# [1:624:73] PipeClient# [7:595:21] 2026-02-19T08:04:25.642692Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2026-02-19T08:04:25.642737Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:596:21] Status# OK ClientId# [8:596:21] ServerId# [1:625:74] PipeClient# [8:596:21] 2026-02-19T08:04:25.642761Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2026-02-19T08:04:25.642820Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:597:21] Status# OK ClientId# [9:597:21] ServerId# [1:626:75] PipeClient# [9:597:21] 2026-02-19T08:04:25.642849Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2026-02-19T08:04:25.642887Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:598:21] Status# OK ClientId# [10:598:21] ServerId# [1:627:76] PipeClient# [10:598:21] 2026-02-19T08:04:25.642913Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2026-02-19T08:04:25.642956Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:599:21] Status# OK ClientId# [11:599:21] ServerId# [1:628:77] PipeClient# [11:599:21] 2026-02-19T08:04:25.642994Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2026-02-19T08:04:25.643040Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:600:21] Status# OK ClientId# [12:600:21] ServerId# [1:629:78] 
PipeClient# [12:600:21] 2026-02-19T08:04:25.643064Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2026-02-19T08:04:25.643101Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:601:21] Status# OK ClientId# [13:601:21] ServerId# [1:630:79] PipeClient# [13:601:21] 2026-02-19T08:04:25.643125Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2026-02-19T08:04:25.643162Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:602:21] Status# OK ClientId# [14:602:21] ServerId# [1:631:80] PipeClient# [14:602:21] 2026-02-19T08:04:25.643186Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2026-02-19T08:04:25.643239Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:603:21] Status# OK ClientId# [15:603:21] ServerId# [1:632:81] PipeClient# [15:603:21] 2026-02-19T08:04:25.643269Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2026-02-19T08:04:25.646286Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:25.646357Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2026-02-19T08:04:25.671696Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2026-02-19T08:04:25.673073Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:25.673150Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2026-02-19T08:04:25.673256Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2026-02-19T08:04:25.673388Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-02-19T08:04:25.673430Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2026-02-19T08:04:25.673516Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2026-02-19T08:04:25.673652Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2026-02-19T08:04:25.673699Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2026-02-19T08:04:25.673795Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2026-02-19T08:04:25.673909Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-02-19T08:04:25.673950Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2026-02-19T08:04:25.674005Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2026-02-19T0 ... 
S_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.008465Z 1 00h01m11.515512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2026-02-19T08:04:26.008769Z 1 00h01m11.515512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.009506Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.009587Z 14 00h01m20.480536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2026-02-19T08:04:26.010012Z 1 00h01m20.480536s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.010801Z 8 00h01m20.481048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-02-19T08:04:26.010855Z 8 00h01m20.481048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2026-02-19T08:04:26.011183Z 2 00h01m25.868512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2026-02-19T08:04:26.011597Z 1 00h01m25.868512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.011983Z 1 00h01m26.515512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.012168Z 12 00h01m27.798512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2026-02-19T08:04:26.012464Z 1 00h01m27.798512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] 
NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.012673Z 15 00h01m29.738512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY 2026-02-19T08:04:26.012994Z 1 00h01m29.738512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.013412Z 1 00h01m30.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.013574Z 10 00h01m30.081512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2026-02-19T08:04:26.013894Z 1 00h01m30.081512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:721} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2026-02-19T08:04:26.014084Z 13 00h01m31.773024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2026-02-19T08:04:26.014610Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483648 2026-02-19T08:04:26.015372Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:26.015465Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2026-02-19T08:04:26.015789Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:26.015829Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2026-02-19T08:04:26.015865Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:26.015898Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2026-02-19T08:04:26.015935Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:26.015996Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2026-02-19T08:04:26.016037Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 
Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:26.016068Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2026-02-19T08:04:26.016102Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:26.016140Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2026-02-19T08:04:26.016193Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:26.016224Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2026-02-19T08:04:26.016258Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:26.016304Z 1 00h01m31.773024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2026-02-19T08:04:26.019139Z 1 00h01m31.773536s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:26.019218Z 1 00h01m31.773536s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2026-02-19T08:04:26.019826Z 1 00h01m31.773536s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2026-02-19T08:04:26.019864Z 1 00h01m31.773536s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483648 Success# true 2026-02-19T08:04:26.020045Z 7 00h01m31.773536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2026-02-19T08:04:26.020094Z 7 00h01m31.773536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2026-02-19T08:04:26.020208Z 2 00h01m31.773536s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:26.020260Z 2 00h01m31.773536s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2026-02-19T08:04:26.020337Z 3 00h01m31.773536s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-02-19T08:04:26.020378Z 3 00h01m31.773536s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2026-02-19T08:04:26.020531Z 4 00h01m31.773536s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2026-02-19T08:04:26.020587Z 4 00h01m31.773536s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2026-02-19T08:04:26.020697Z 5 00h01m31.773536s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-02-19T08:04:26.020748Z 5 00h01m31.773536s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2026-02-19T08:04:26.020816Z 6 00h01m31.773536s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:26.020853Z 6 00h01m31.773536s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2026-02-19T08:04:26.020904Z 9 00h01m31.773536s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-02-19T08:04:26.021013Z 13 00h01m31.773536s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-02-19T08:04:26.021061Z 13 00h01m31.773536s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2026-02-19T08:04:26.021167Z 14 00h01m31.773536s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2026-02-19T08:04:26.021261Z 14 
00h01m31.773536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2026-02-19T08:04:26.021379Z 15 00h01m31.773536s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-02-19T08:04:26.021426Z 15 00h01m31.773536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2026-02-19T08:04:26.021498Z 15 00h01m31.773536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2026-02-19T08:04:26.022728Z 11 00h01m32.825512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2026-02-19T08:04:26.023262Z 13 00h01m34.329512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2026-02-19T08:04:26.023875Z 3 00h01m34.982512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2026-02-19T08:04:26.025150Z 14 00h01m37.308512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2026-02-19T08:04:26.025704Z 15 00h01m37.414536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2026-02-19T08:04:26.031204Z 15 00h02m02.606536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2026-02-19T08:04:26.032269Z 9 00h02m02.607048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-02-19T08:04:26.032328Z 9 00h02m02.607048s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] Test command err: 2026-02-19T08:04:27.398867Z :BS_VDISK_GET CRIT: query_base.h:102: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVGetResult: Result message is too large; size# 67108001 orig# {ExtrQuery# [5000:1:0:0:0:100000:1] sh# 257 sz# 99743 c# 0}{ExtrQuery# [5000:1:1:0:0:100000:1] sh# 257 sz# 99743 c# 1}{ExtrQuery# [5000:1:2:0:0:100000:1] sh# 257 sz# 99743 c# 2}{ExtrQuery# [5000:1:3:0:0:100000:1] sh# 257 sz# 99743 c# 3}{ExtrQuery# [5000:1:4:0:0:100000:1] sh# 257 sz# 99743 c# 4}{ExtrQuery# [5000:1:5:0:0:100000:1] sh# 257 sz# 99743 c# 5}{ExtrQuery# [5000:1:6:0:0:100000:1] sh# 257 sz# 99743 c# 6}{ExtrQuery# [5000:1:7:0:0:100000:1] sh# 257 sz# 99743 c# 7}{ExtrQuery# [5000:1:8:0:0:100000:1] sh# 257 sz# 99743 c# 8}{ExtrQuery# [5000:1:9:0:0:100000:1] sh# 257 sz# 99743 c# 9}{ExtrQuery# [5000:1:10:0:0:100000:1] sh# 257 sz# 99743 c# 10}{ExtrQuery# [5000:1:11:0:0:100000:1] sh# 257 sz# 99743 c# 11}{ExtrQuery# [5000:1:12:0:0:100000:1] sh# 257 sz# 99743 c# 12}{ExtrQuery# [5000:1:13:0:0:100000:1] sh# 257 sz# 99743 c# 13}{ExtrQuery# [5000:1:14:0:0:100000:1] sh# 257 sz# 99743 c# 14}{ExtrQuery# [5000:1:15:0:0:100000:1] sh# 257 sz# 99743 c# 15}{ExtrQuery# [5000:1:16:0:0:100000:1] sh# 257 sz# 99743 c# 16}{ExtrQuery# [5000:1:17:0:0:100000:1] sh# 257 sz# 99743 c# 17}{ExtrQuery# [5000:1:18:0:0:100000:1] sh# 257 sz# 99743 c# 18}{ExtrQuery# [5000:1:19:0:0:100000:1] sh# 257 sz# 99743 c# 19}{ExtrQuery# [5000:1:20:0:0:100000:1] sh# 257 sz# 99743 c# 20}{ExtrQuery# [5000:1:21:0:0:100000:1] sh# 257 sz# 99743 c# 21}{ExtrQuery# [5000:1:22:0:0:100000:1] sh# 257 sz# 99743 c# 22}{ExtrQuery# [5000:1:23:0:0:100000:1] sh# 257 sz# 99743 c# 23}{ExtrQuery# [5000:1:24:0:0:100000:1] sh# 257 sz# 99743 c# 24}{ExtrQuery# [5000:1:25:0:0:100000:1] sh# 257 sz# 99743 c# 25}{ExtrQuery# [5000:1:26:0:0:100000:1] sh# 257 sz# 99743 c# 26}{ExtrQuery# [5000:1:27:0:0:100000:1] sh# 257 sz# 99743 c# 27}{ExtrQuery# [5000:1:28:0:0:100000:1] sh# 257 sz# 99743 c# 28}{ExtrQuery# [5000:1:29:0:0:100000:1] sh# 257 sz# 
99743 c# 29}{ExtrQuery# [5000:1:30:0:0:100000:1] sh# 257 sz# 99743 c# 30}{ExtrQuery# [5000:1:31:0:0:100000:1] sh# 257 sz# 99743 c# 31}{ExtrQuery# [5000:1:32:0:0:100000:1] sh# 257 sz# 99743 c# 32}{ExtrQuery# [5000:1:33:0:0:100000:1] sh# 257 sz# 99743 c# 33}{ExtrQuery# [5000:1:34:0:0:100000:1] sh# 257 sz# 99743 c# 34}{ExtrQuery# [5000:1:35:0:0:100000:1] sh# 257 sz# 99743 c# 35}{ExtrQuery# [5000:1:36:0:0:100000:1] sh# 257 sz# 99743 c# 36}{ExtrQuery# [5000:1:37:0:0:100000:1] sh# 257 sz# 99743 c# 37}{ExtrQuery# [5000:1:38:0:0:100000:1] sh# 257 sz# 99743 c# 38}{ExtrQuery# [5000:1:39:0:0:100000:1] sh# 257 sz# 99743 c# 39}{ExtrQuery# [5000:1:40:0:0:100000:1] sh# 257 sz# 99743 c# 40}{ExtrQuery# [5000:1:41:0:0:100000:1] sh# 257 sz# 99743 c# 41}{ExtrQuery# [5000:1:42:0:0:100000:1] sh# 257 sz# 99743 c# 42}{ExtrQuery# [5000:1:43:0:0:100000:1] sh# 257 sz# 99743 c# 43}{ExtrQuery# [5000:1:44:0:0:100000:1] sh# 257 sz# 99743 c# 44}{ExtrQuery# [5000:1:45:0:0:100000:1] sh# 257 sz# 99743 c# 45}{ExtrQuery# [5000:1:46:0:0:100000:1] sh# 257 sz# 99743 c# 46}{ExtrQuery# [5000:1:47:0:0:100000:1] sh# 257 sz# 99743 c# 47}{ExtrQuery# [5000:1:48:0:0:100000:1] sh# 257 sz# 99743 c# 48}{ExtrQuery# [5000:1:49:0:0:100000:1] sh# 257 sz# 99743 c# 49}{ExtrQuery# [5000:1:50:0:0:100000:1] sh# 257 sz# 99743 c# 50}{ExtrQuery# [5000:1:51:0:0:100000:1] sh# 257 sz# 99743 c# 51}{ExtrQuery# [5000:1:52:0:0:100000:1] sh# 257 sz# 99743 c# 52}{ExtrQuery# [5000:1:53:0:0:100000:1] sh# 257 sz# 99743 c# 53}{ExtrQuery# [5000:1:54:0:0:100000:1] sh# 257 sz# 99743 c# 54}{ExtrQuery# [5000:1:55:0:0:100000:1] sh# 257 sz# 99743 c# 55}{ExtrQuery# [5000:1:56:0:0:100000:1] sh# 257 sz# 99743 c# 56}{ExtrQuery# [5000:1:57:0:0:100000:1] sh# 257 sz# 99743 c# 57}{ExtrQuery# [5000:1:58:0:0:100000:1] sh# 257 sz# 99743 c# 58}{ExtrQuery# [5000:1:59:0:0:100000:1] sh# 257 sz# 99743 c# 59}{ExtrQuery# [5000:1:60:0:0:100000:1] sh# 257 sz# 99743 c# 60}{ExtrQuery# [5000:1:61:0:0:100000:1] sh# 257 sz# 99743 c# 61}{ExtrQuery# [5000:1:62:0:0:100000:1] sh# 257 sz# 99743 c# 62}{ExtrQuery# [5000:1:63:0:0:100000:1] sh# 257 sz# 99743 c# 63}{ExtrQuery# [5000:1:64:0:0:100000:1] sh# 257 sz# 99743 c# 64}{ExtrQuery# [5000:1:65:0:0:100000:1] sh# 257 sz# 99743 c# 65}{ExtrQuery# [5000:1:66:0:0:100000:1] sh# 257 sz# 99743 c# 66}{ExtrQuery# [5000:1:67:0:0:100000:1] sh# 257 sz# 99743 c# 67}{ExtrQuery# [5000:1:68:0:0:100000:1] sh# 257 sz# 99743 c# 68}{ExtrQuery# [5000:1:69:0:0:100000:1] sh# 257 sz# 99743 c# 69}{ExtrQuery# [5000:1:70:0:0:100000:1] sh# 257 sz# 99743 c# 70}{ExtrQuery# [5000:1:71:0:0:100000:1] sh# 257 sz# 99743 c# 71}{ExtrQuery# [5000:1:72:0:0:100000:1] sh# 257 sz# 99743 c# 72}{ExtrQuery# [5000:1:73:0:0:100000:1] sh# 257 sz# 99743 c# 73}{ExtrQuery# [5000:1:74:0:0:100000:1] sh# 257 sz# 99743 c# 74}{ExtrQuery# [5000:1:75:0:0:100000:1] sh# 257 sz# 99743 c# 75}{ExtrQuery# [5000:1:76:0:0:100000:1] sh# 257 sz# 99743 c# 76}{ExtrQuery# [5000:1:77:0:0:100000:1] sh# 257 sz# 99743 c# 77}{ExtrQuery# [5000:1:78:0:0:100000:1] sh# 257 sz# 99743 c# 78}{ExtrQuery# [5000:1:79:0:0:100000:1] sh# 257 sz# 99743 c# 79}{ExtrQuery# [5000:1:80:0:0:100000:1] sh# 257 sz# 99743 c# 80}{ExtrQuery# [5000:1:81:0:0:100000:1] sh# 257 sz# 99743 c# 81}{ExtrQuery# [5000:1:82:0:0:100000:1] sh# 257 sz# 99743 c# 82}{ExtrQuery# [5000:1:83:0:0:100000:1] sh# 257 sz# 99743 c# 83}{ExtrQuery# [5000:1:84:0:0:100000:1] sh# 257 sz# 99743 c# 84}{ExtrQuery# [5000:1:85:0:0:100000:1] sh# 257 sz# 99743 c# 85}{ExtrQuery# [5000:1:86:0:0:100000:1] sh# 257 sz# 99743 c# 86}{ExtrQuery# [5000:1:87:0:0:100000:1] sh# 257 sz# 99743 c# 
87}{ExtrQuery# [5000:1:88:0:0:100000:1] sh# 257 sz# 99743 c# 88}{ExtrQuery# [5000:1:89:0:0:100000:1] sh# 257 sz# 99743 c# 89}{ExtrQuery# [5000:1:90:0:0:100000:1] sh# 257 sz# 99743 c# 90}{ExtrQuery# [5000:1:91:0:0:100000:1] sh# 257 sz# 99743 c# 91}{ExtrQuery# [5000:1:92:0:0:100000:1] sh# 257 sz# 99743 c# 92}{ExtrQuery# [5000:1:93:0:0:100000:1] sh# 257 sz# 99743 c# 93}{ExtrQuery# [5000:1:94:0:0:100000:1] sh# 257 sz# 99743 c# 94}{ExtrQuery# [5000:1:95:0:0:100000:1] sh# 257 sz# 99743 c# 95}{ExtrQuery# [5000:1:96:0:0:100000:1] sh# 257 sz# 99743 c# 96}{ExtrQuery# [5000:1:97:0:0:100000:1] sh# 257 sz# 99743 c# 97}{ExtrQuery# [5000:1:98:0:0:100000:1] sh# 257 sz# 99743 c# 98}{ExtrQuery# [5000:1:99:0:0:100000:1] sh# 257 sz# 99743 c# 99}{ExtrQuery# [5000:1:100:0:0:100000:1] sh# 257 sz# 99743 c# 100}{ExtrQuery# [5000:1:101:0:0:100000:1] sh# 257 sz# 99743 c# 101}{ExtrQuery# [5000:1:102:0:0:100000:1] sh# 257 sz# 99743 c# 102}{ExtrQuery# [5000:1:103:0:0:100000:1] sh# 257 sz# 99743 c# 103}{ExtrQuery# [5000:1:104:0:0:100000:1] sh# 257 sz# 99743 c# 104}{ExtrQuery# [5000:1:105:0:0:100000:1] sh# 257 sz# 99743 c# 105}{ExtrQuery# [5000:1:106:0:0:100000:1] sh# 257 sz# 99743 c# 106}{ExtrQuery# [5000:1:107:0:0:100000:1] sh# 257 sz# 99743 c# 107}{ExtrQuery# [5000:1:108:0:0:100000:1] sh# 257 sz# 99743 c# 108}{ExtrQuery# [5000:1:109:0:0:100000:1] sh# 257 sz# 99743 c# 109}{ExtrQuery# [5000:1:110:0:0:100000:1] sh# 257 sz# 99743 c# 110}{ExtrQuery# [5000:1:111:0:0:100000:1] sh# 257 sz# 99743 c# 111}{ExtrQuery# [5000:1:112:0:0:100000:1] sh# 257 sz# 99743 c# 112}{ExtrQuery# [5000:1:113:0:0:100000:1] sh# 257 sz# 99743 c# 113}{ExtrQuery# [5000:1:114:0:0:100000:1] sh# 257 sz# 99743 c# 114}{ExtrQuery# [5000:1:115:0:0:100000:1] sh# 257 sz# 99743 c# 115}{ExtrQuery# [5000:1:116:0:0:100000:1] sh# 257 sz# 99743 c# 116}{ExtrQuery# [5000:1:117:0:0:100000:1] sh# 257 sz# 99743 c# 117}{ExtrQuery# [5000:1:118:0:0:100000:1] sh# 257 sz# 99743 c# 118}{ExtrQuery# [5000:1:119:0:0:100000:1] sh# 257 sz# 99743 c# 119}{ExtrQuery# [5000:1:120:0:0:100000:1] sh# 257 sz# 99743 c# 120}{ExtrQuery# [5000:1:121:0:0:100000:1] sh# 257 sz# 99743 c# 121}{ExtrQuery# [5000:1:122:0:0:100000:1] sh# 257 sz# 99743 c# 122}{ExtrQuery# [5000:1:123:0:0:100000:1] sh# 257 sz# 99743 c# 123}{ExtrQuery# [5000:1:124:0:0:100000:1] sh# 257 sz# 99743 c# 124}{ExtrQuery# [5000:1:125:0:0:100000:1] sh# 257 sz# 99743 c# 125}{ExtrQuery# [5000:1:126:0:0:100000:1] sh# 257 sz# 99743 c# 126}{ExtrQuery# [5000:1:127:0:0:100000:1] sh# 257 sz# 99743 c# 127}{ExtrQuery# [5000:1:128:0:0:100000:1] sh# 257 sz# 99743 c# 128}{ExtrQuery# [5000:1:129:0:0:100000:1] sh# 257 sz# 99743 c# 129}{ExtrQuery# [5000:1:130:0:0:100000:1] sh# 257 sz# 99743 c# 130}{ExtrQuery# [5000:1:131:0:0:100000:1] sh# 257 sz# 99743 c# 131}{ExtrQuery# [5000:1:132:0:0:100000:1] sh# 257 sz# 99743 c# 132}{ExtrQuery# [5000:1:133:0:0:100000:1] sh# 257 sz# 99743 c# 133}{ExtrQuery# [5000:1:134:0:0:100000:1] sh# 257 sz# 99743 c# 134}{ExtrQuery# [5000:1:135:0:0:100000:1] sh# 257 sz# 99743 c# 135}{ExtrQuery# [5000:1:136:0:0:100000:1] sh# 257 sz# 99743 c# 136}{ExtrQuery# [5000:1:137:0:0:100000:1] sh# 257 sz# 99743 c# 137}{ExtrQuery# [5000:1:138:0:0:100000:1] sh# 257 sz# 99743 c# 138}{ExtrQuery# [5000:1:139:0:0:100000:1] sh# 257 sz# 99743 c# 139}{ExtrQuery# [5000:1:140:0:0:100000:1] sh# 257 sz# 99743 c# 140}{ExtrQuery# [5000:1:141:0:0:100000:1] sh# 257 sz# 99743 c# 141}{ExtrQuery# [5000:1:142:0:0:100000:1] sh# 257 sz# 99743 c# 142}{ExtrQuery# [5000:1:143:0:0:100000:1] sh# 257 sz# 99743 c# 143}{ExtrQuery# [5000:1:144:0:0:100000:1] sh# 257 
sz# 99743 c# 144}{ExtrQuery# [5000:1:145:0:0:100000:1] sh# 257 sz# 99743 c# 145}{ExtrQuery# [5000:1:146:0:0:100000:1] sh# 257 sz# 99743 c# 146}{ExtrQuery# [5000:1:147:0:0:100000:1] sh# 257 sz# 99743 c# 147}{ExtrQuery# [5000:1:148:0:0:100000:1] sh# 257 sz# 99743 c# 148}{ExtrQuery# [5000:1:149:0:0:100000:1] sh# 257 sz# 99743 c# 149}{ExtrQuery# [5000:1:150:0:0:100000:1] sh# 257 sz# 99743 c# 150}{ExtrQuery# [5000:1:151:0:0:100000:1] sh# 257 sz# 99743 c# 151}{ExtrQuery# [5000:1:152:0:0:100000:1] sh# 257 sz# 99743 c# 152}{ExtrQuery# [5000:1:153:0:0:100000:1] sh# 257 sz# 99743 c# 153}{ExtrQuery# [5000:1:154:0:0:100000:1] sh# 257 sz# 99743 c# 154}{ExtrQuery# [5000:1:155:0:0:100000:1] sh# 257 sz# 99743 c# 155}{ExtrQuery# [5000:1:156:0:0:100000:1] sh# 257 sz# 99743 c# 156}{ExtrQuery# [5000:1:157:0:0:100000:1] sh# 257 sz# 99743 c# 157}{ExtrQuery# [5000:1:158:0:0:100000:1] sh# 257 sz# 99743 c# 158}{ExtrQuery# [5000:1:159:0:0:100000:1] sh# 257 sz# 99743 c# 159}{ExtrQuery# [5000:1:160:0:0:100000:1] sh# 257 sz# 99743 c# 160}{ExtrQuery# [5000:1:161:0:0:100000:1] sh# 257 sz# 99743 c# 161}{ExtrQuery# [5000:1:162:0:0:100000:1] sh# 257 sz# 99743 c# 162}{ExtrQuery# [5000:1:163:0:0:100000:1] sh# 257 sz# 99743 c# 163}{ExtrQuery# [5000:1:164:0:0:100000:1] sh# 257 sz# 99743 c# 164}{ExtrQuery# [5000:1:165:0:0:100000:1] sh# 257 sz# 99743 c# 165}{ExtrQuery# [5000:1:166:0:0:100000:1] sh# 257 sz# 99743 c# 166}{ExtrQuery# [5000:1:167:0:0:100000:1] sh# 257 sz# 99743 c# 167}{ExtrQuery# [5000:1:168:0:0:100000:1] sh# 257 sz# 99743 c# 168}{ExtrQuery# [5000:1:169:0:0:100000:1] sh# 257 sz# 99743 c# 169}{ExtrQuery# [5000:1:170:0:0:100000:1] sh# 257 sz# 99743 c# 170}{ExtrQuery# [5000:1:171:0:0:100000:1] sh# 257 sz# 99743 c# 171}{ExtrQuery# [5000:1:172:0:0:100000:1] sh# 257 sz# 99743 c# 172}{ExtrQuery# [5000:1:173:0:0:100000:1] sh# 257 sz# 99743 c# 173}{ExtrQuery# [5000:1:174:0:0:100000:1] sh# 257 sz# 99743 c# 174}{ExtrQuery# [5000:1:175:0:0:100000:1] sh# 257 sz# 99743 c# 175}{ExtrQuery# [5000:1:176:0:0:100000:1] sh# 257 sz# 99743 c# 176}{ExtrQuery# [5000:1:177:0:0:100000:1] sh# 257 sz# 99743 c# 177}{ExtrQuery# [5000:1:178:0:0:100000:1] sh# 257 sz# 99743 c# 178}{ExtrQuery# [5000:1:179:0:0:100000:1] sh# 257 sz# 99743 c# 179}{ExtrQuery# [5000:1:180:0:0:100000:1] sh# 257 sz# 99743 c# 180}{ExtrQuery# [5000:1:181:0:0:100000:1] sh# 257 sz# 99743 c# 181}{ExtrQuery# [5000:1:182:0:0:100000:1] sh# 257 sz# 99743 c# 182}{ExtrQuery# [5000:1:183:0:0:100000:1] sh# 257 sz# 99743 c# 183}{ExtrQuery# [5000:1:184:0:0:100000:1] sh# 257 sz# 99743 c# 184}{ExtrQuery# [5000:1:185:0:0:100000:1] sh# 257 sz# 99743 c# 185}{ExtrQuery# [5000:1:186:0:0:100000:1] sh# 257 sz# 99743 c# 186}{ExtrQuery# [5000:1:187:0:0:100000:1] sh# 257 sz# 99743 c# 187}{ExtrQuery# [5000:1:188:0:0:100000:1] sh# 257 sz# 99743 c# 188}{ExtrQuery# [5000:1:189:0:0:100000:1] sh# 257 sz# 99743 c# 189}{ExtrQuery# [5000:1:190:0:0:100000:1] sh# 257 sz# 99743 c# 190}{ExtrQuery# [5000:1:191 ... 
sz# 99743 c# 484}{ExtrQuery# [5000:1:485:0:0:100000:1] sh# 257 sz# 99743 c# 485}{ExtrQuery# [5000:1:486:0:0:100000:1] sh# 257 sz# 99743 c# 486}{ExtrQuery# [5000:1:487:0:0:100000:1] sh# 257 sz# 99743 c# 487}{ExtrQuery# [5000:1:488:0:0:100000:1] sh# 257 sz# 99743 c# 488}{ExtrQuery# [5000:1:489:0:0:100000:1] sh# 257 sz# 99743 c# 489}{ExtrQuery# [5000:1:490:0:0:100000:1] sh# 257 sz# 99743 c# 490}{ExtrQuery# [5000:1:491:0:0:100000:1] sh# 257 sz# 99743 c# 491}{ExtrQuery# [5000:1:492:0:0:100000:1] sh# 257 sz# 99743 c# 492}{ExtrQuery# [5000:1:493:0:0:100000:1] sh# 257 sz# 99743 c# 493}{ExtrQuery# [5000:1:494:0:0:100000:1] sh# 257 sz# 99743 c# 494}{ExtrQuery# [5000:1:495:0:0:100000:1] sh# 257 sz# 99743 c# 495}{ExtrQuery# [5000:1:496:0:0:100000:1] sh# 257 sz# 99743 c# 496}{ExtrQuery# [5000:1:497:0:0:100000:1] sh# 257 sz# 99743 c# 497}{ExtrQuery# [5000:1:498:0:0:100000:1] sh# 257 sz# 99743 c# 498}{ExtrQuery# [5000:1:499:0:0:100000:1] sh# 257 sz# 99743 c# 499}{ExtrQuery# [5000:1:500:0:0:100000:1] sh# 257 sz# 99743 c# 500}{ExtrQuery# [5000:1:501:0:0:100000:1] sh# 257 sz# 99743 c# 501}{ExtrQuery# [5000:1:502:0:0:100000:1] sh# 257 sz# 99743 c# 502}{ExtrQuery# [5000:1:503:0:0:100000:1] sh# 257 sz# 99743 c# 503}{ExtrQuery# [5000:1:504:0:0:100000:1] sh# 257 sz# 99743 c# 504}{ExtrQuery# [5000:1:505:0:0:100000:1] sh# 257 sz# 99743 c# 505}{ExtrQuery# [5000:1:506:0:0:100000:1] sh# 257 sz# 99743 c# 506}{ExtrQuery# [5000:1:507:0:0:100000:1] sh# 257 sz# 99743 c# 507}{ExtrQuery# [5000:1:508:0:0:100000:1] sh# 257 sz# 99743 c# 508}{ExtrQuery# [5000:1:509:0:0:100000:1] sh# 257 sz# 99743 c# 509}{ExtrQuery# [5000:1:510:0:0:100000:1] sh# 257 sz# 99743 c# 510}{ExtrQuery# [5000:1:511:0:0:100000:1] sh# 257 sz# 99743 c# 511}{ExtrQuery# [5000:1:512:0:0:100000:1] sh# 257 sz# 99743 c# 512}{ExtrQuery# [5000:1:513:0:0:100000:1] sh# 257 sz# 99743 c# 513}{ExtrQuery# [5000:1:514:0:0:100000:1] sh# 257 sz# 99743 c# 514}{ExtrQuery# [5000:1:515:0:0:100000:1] sh# 257 sz# 99743 c# 515}{ExtrQuery# [5000:1:516:0:0:100000:1] sh# 257 sz# 99743 c# 516}{ExtrQuery# [5000:1:517:0:0:100000:1] sh# 257 sz# 99743 c# 517}{ExtrQuery# [5000:1:518:0:0:100000:1] sh# 257 sz# 99743 c# 518}{ExtrQuery# [5000:1:519:0:0:100000:1] sh# 257 sz# 99743 c# 519}{ExtrQuery# [5000:1:520:0:0:100000:1] sh# 257 sz# 99743 c# 520}{ExtrQuery# [5000:1:521:0:0:100000:1] sh# 257 sz# 99743 c# 521}{ExtrQuery# [5000:1:522:0:0:100000:1] sh# 257 sz# 99743 c# 522}{ExtrQuery# [5000:1:523:0:0:100000:1] sh# 257 sz# 99743 c# 523}{ExtrQuery# [5000:1:524:0:0:100000:1] sh# 257 sz# 99743 c# 524}{ExtrQuery# [5000:1:525:0:0:100000:1] sh# 257 sz# 99743 c# 525}{ExtrQuery# [5000:1:526:0:0:100000:1] sh# 257 sz# 99743 c# 526}{ExtrQuery# [5000:1:527:0:0:100000:1] sh# 257 sz# 99743 c# 527}{ExtrQuery# [5000:1:528:0:0:100000:1] sh# 257 sz# 99743 c# 528}{ExtrQuery# [5000:1:529:0:0:100000:1] sh# 257 sz# 99743 c# 529}{ExtrQuery# [5000:1:530:0:0:100000:1] sh# 257 sz# 99743 c# 530}{ExtrQuery# [5000:1:531:0:0:100000:1] sh# 257 sz# 99743 c# 531}{ExtrQuery# [5000:1:532:0:0:100000:1] sh# 257 sz# 99743 c# 532}{ExtrQuery# [5000:1:533:0:0:100000:1] sh# 257 sz# 99743 c# 533}{ExtrQuery# [5000:1:534:0:0:100000:1] sh# 257 sz# 99743 c# 534}{ExtrQuery# [5000:1:535:0:0:100000:1] sh# 257 sz# 99743 c# 535}{ExtrQuery# [5000:1:536:0:0:100000:1] sh# 257 sz# 99743 c# 536}{ExtrQuery# [5000:1:537:0:0:100000:1] sh# 257 sz# 99743 c# 537}{ExtrQuery# [5000:1:538:0:0:100000:1] sh# 257 sz# 99743 c# 538}{ExtrQuery# [5000:1:539:0:0:100000:1] sh# 257 sz# 99743 c# 539}{ExtrQuery# [5000:1:540:0:0:100000:1] sh# 257 sz# 99743 c# 
540}{ExtrQuery# [5000:1:541:0:0:100000:1] sh# 257 sz# 99743 c# 541}{ExtrQuery# [5000:1:542:0:0:100000:1] sh# 257 sz# 99743 c# 542}{ExtrQuery# [5000:1:543:0:0:100000:1] sh# 257 sz# 99743 c# 543}{ExtrQuery# [5000:1:544:0:0:100000:1] sh# 257 sz# 99743 c# 544}{ExtrQuery# [5000:1:545:0:0:100000:1] sh# 257 sz# 99743 c# 545}{ExtrQuery# [5000:1:546:0:0:100000:1] sh# 257 sz# 99743 c# 546}{ExtrQuery# [5000:1:547:0:0:100000:1] sh# 257 sz# 99743 c# 547}{ExtrQuery# [5000:1:548:0:0:100000:1] sh# 257 sz# 99743 c# 548}{ExtrQuery# [5000:1:549:0:0:100000:1] sh# 257 sz# 99743 c# 549}{ExtrQuery# [5000:1:550:0:0:100000:1] sh# 257 sz# 99743 c# 550}{ExtrQuery# [5000:1:551:0:0:100000:1] sh# 257 sz# 99743 c# 551}{ExtrQuery# [5000:1:552:0:0:100000:1] sh# 257 sz# 99743 c# 552}{ExtrQuery# [5000:1:553:0:0:100000:1] sh# 257 sz# 99743 c# 553}{ExtrQuery# [5000:1:554:0:0:100000:1] sh# 257 sz# 99743 c# 554}{ExtrQuery# [5000:1:555:0:0:100000:1] sh# 257 sz# 99743 c# 555}{ExtrQuery# [5000:1:556:0:0:100000:1] sh# 257 sz# 99743 c# 556}{ExtrQuery# [5000:1:557:0:0:100000:1] sh# 257 sz# 99743 c# 557}{ExtrQuery# [5000:1:558:0:0:100000:1] sh# 257 sz# 99743 c# 558}{ExtrQuery# [5000:1:559:0:0:100000:1] sh# 257 sz# 99743 c# 559}{ExtrQuery# [5000:1:560:0:0:100000:1] sh# 257 sz# 99743 c# 560}{ExtrQuery# [5000:1:561:0:0:100000:1] sh# 257 sz# 99743 c# 561}{ExtrQuery# [5000:1:562:0:0:100000:1] sh# 257 sz# 99743 c# 562}{ExtrQuery# [5000:1:563:0:0:100000:1] sh# 257 sz# 99743 c# 563}{ExtrQuery# [5000:1:564:0:0:100000:1] sh# 257 sz# 99743 c# 564}{ExtrQuery# [5000:1:565:0:0:100000:1] sh# 257 sz# 99743 c# 565}{ExtrQuery# [5000:1:566:0:0:100000:1] sh# 257 sz# 99743 c# 566}{ExtrQuery# [5000:1:567:0:0:100000:1] sh# 257 sz# 99743 c# 567}{ExtrQuery# [5000:1:568:0:0:100000:1] sh# 257 sz# 99743 c# 568}{ExtrQuery# [5000:1:569:0:0:100000:1] sh# 257 sz# 99743 c# 569}{ExtrQuery# [5000:1:570:0:0:100000:1] sh# 257 sz# 99743 c# 570}{ExtrQuery# [5000:1:571:0:0:100000:1] sh# 257 sz# 99743 c# 571}{ExtrQuery# [5000:1:572:0:0:100000:1] sh# 257 sz# 99743 c# 572}{ExtrQuery# [5000:1:573:0:0:100000:1] sh# 257 sz# 99743 c# 573}{ExtrQuery# [5000:1:574:0:0:100000:1] sh# 257 sz# 99743 c# 574}{ExtrQuery# [5000:1:575:0:0:100000:1] sh# 257 sz# 99743 c# 575}{ExtrQuery# [5000:1:576:0:0:100000:1] sh# 257 sz# 99743 c# 576}{ExtrQuery# [5000:1:577:0:0:100000:1] sh# 257 sz# 99743 c# 577}{ExtrQuery# [5000:1:578:0:0:100000:1] sh# 257 sz# 99743 c# 578}{ExtrQuery# [5000:1:579:0:0:100000:1] sh# 257 sz# 99743 c# 579}{ExtrQuery# [5000:1:580:0:0:100000:1] sh# 257 sz# 99743 c# 580}{ExtrQuery# [5000:1:581:0:0:100000:1] sh# 257 sz# 99743 c# 581}{ExtrQuery# [5000:1:582:0:0:100000:1] sh# 257 sz# 99743 c# 582}{ExtrQuery# [5000:1:583:0:0:100000:1] sh# 257 sz# 99743 c# 583}{ExtrQuery# [5000:1:584:0:0:100000:1] sh# 257 sz# 99743 c# 584}{ExtrQuery# [5000:1:585:0:0:100000:1] sh# 257 sz# 99743 c# 585}{ExtrQuery# [5000:1:586:0:0:100000:1] sh# 257 sz# 99743 c# 586}{ExtrQuery# [5000:1:587:0:0:100000:1] sh# 257 sz# 99743 c# 587}{ExtrQuery# [5000:1:588:0:0:100000:1] sh# 257 sz# 99743 c# 588}{ExtrQuery# [5000:1:589:0:0:100000:1] sh# 257 sz# 99743 c# 589}{ExtrQuery# [5000:1:590:0:0:100000:1] sh# 257 sz# 99743 c# 590}{ExtrQuery# [5000:1:591:0:0:100000:1] sh# 257 sz# 99743 c# 591}{ExtrQuery# [5000:1:592:0:0:100000:1] sh# 257 sz# 99743 c# 592}{ExtrQuery# [5000:1:593:0:0:100000:1] sh# 257 sz# 99743 c# 593}{ExtrQuery# [5000:1:594:0:0:100000:1] sh# 257 sz# 99743 c# 594}{ExtrQuery# [5000:1:595:0:0:100000:1] sh# 257 sz# 99743 c# 595}{ExtrQuery# [5000:1:596:0:0:100000:1] sh# 257 sz# 99743 c# 596}{ExtrQuery# 
[5000:1:597:0:0:100000:1] sh# 257 sz# 99743 c# 597}{ExtrQuery# [5000:1:598:0:0:100000:1] sh# 257 sz# 99743 c# 598}{ExtrQuery# [5000:1:599:0:0:100000:1] sh# 257 sz# 99743 c# 599}{ExtrQuery# [5000:1:600:0:0:100000:1] sh# 257 sz# 99743 c# 600}{ExtrQuery# [5000:1:601:0:0:100000:1] sh# 257 sz# 99743 c# 601}{ExtrQuery# [5000:1:602:0:0:100000:1] sh# 257 sz# 99743 c# 602}{ExtrQuery# [5000:1:603:0:0:100000:1] sh# 257 sz# 99743 c# 603}{ExtrQuery# [5000:1:604:0:0:100000:1] sh# 257 sz# 99743 c# 604}{ExtrQuery# [5000:1:605:0:0:100000:1] sh# 257 sz# 99743 c# 605}{ExtrQuery# [5000:1:606:0:0:100000:1] sh# 257 sz# 99743 c# 606}{ExtrQuery# [5000:1:607:0:0:100000:1] sh# 257 sz# 99743 c# 607}{ExtrQuery# [5000:1:608:0:0:100000:1] sh# 257 sz# 99743 c# 608}{ExtrQuery# [5000:1:609:0:0:100000:1] sh# 257 sz# 99743 c# 609}{ExtrQuery# [5000:1:610:0:0:100000:1] sh# 257 sz# 99743 c# 610}{ExtrQuery# [5000:1:611:0:0:100000:1] sh# 257 sz# 99743 c# 611}{ExtrQuery# [5000:1:612:0:0:100000:1] sh# 257 sz# 99743 c# 612}{ExtrQuery# [5000:1:613:0:0:100000:1] sh# 257 sz# 99743 c# 613}{ExtrQuery# [5000:1:614:0:0:100000:1] sh# 257 sz# 99743 c# 614}{ExtrQuery# [5000:1:615:0:0:100000:1] sh# 257 sz# 99743 c# 615}{ExtrQuery# [5000:1:616:0:0:100000:1] sh# 257 sz# 99743 c# 616}{ExtrQuery# [5000:1:617:0:0:100000:1] sh# 257 sz# 99743 c# 617}{ExtrQuery# [5000:1:618:0:0:100000:1] sh# 257 sz# 99743 c# 618}{ExtrQuery# [5000:1:619:0:0:100000:1] sh# 257 sz# 99743 c# 619}{ExtrQuery# [5000:1:620:0:0:100000:1] sh# 257 sz# 99743 c# 620}{ExtrQuery# [5000:1:621:0:0:100000:1] sh# 257 sz# 99743 c# 621}{ExtrQuery# [5000:1:622:0:0:100000:1] sh# 257 sz# 99743 c# 622}{ExtrQuery# [5000:1:623:0:0:100000:1] sh# 257 sz# 99743 c# 623}{ExtrQuery# [5000:1:624:0:0:100000:1] sh# 257 sz# 99743 c# 624}{ExtrQuery# [5000:1:625:0:0:100000:1] sh# 257 sz# 99743 c# 625}{ExtrQuery# [5000:1:626:0:0:100000:1] sh# 257 sz# 99743 c# 626}{ExtrQuery# [5000:1:627:0:0:100000:1] sh# 257 sz# 99743 c# 627}{ExtrQuery# [5000:1:628:0:0:100000:1] sh# 257 sz# 99743 c# 628}{ExtrQuery# [5000:1:629:0:0:100000:1] sh# 257 sz# 99743 c# 629}{ExtrQuery# [5000:1:630:0:0:100000:1] sh# 257 sz# 99743 c# 630}{ExtrQuery# [5000:1:631:0:0:100000:1] sh# 257 sz# 99743 c# 631}{ExtrQuery# [5000:1:632:0:0:100000:1] sh# 257 sz# 99743 c# 632}{ExtrQuery# [5000:1:633:0:0:100000:1] sh# 257 sz# 99743 c# 633}{ExtrQuery# [5000:1:634:0:0:100000:1] sh# 257 sz# 99743 c# 634}{ExtrQuery# [5000:1:635:0:0:100000:1] sh# 257 sz# 99743 c# 635}{ExtrQuery# [5000:1:636:0:0:100000:1] sh# 257 sz# 99743 c# 636}{ExtrQuery# [5000:1:637:0:0:100000:1] sh# 257 sz# 99743 c# 637}{ExtrQuery# [5000:1:638:0:0:100000:1] sh# 257 sz# 99743 c# 638}{ExtrQuery# [5000:1:639:0:0:100000:1] sh# 257 sz# 99743 c# 639}{ExtrQuery# [5000:1:640:0:0:100000:1] sh# 257 sz# 99743 c# 640}{ExtrQuery# [5000:1:641:0:0:100000:1] sh# 257 sz# 99743 c# 641}{ExtrQuery# [5000:1:642:0:0:100000:1] sh# 257 sz# 99743 c# 642}{ExtrQuery# [5000:1:643:0:0:100000:1] sh# 257 sz# 99743 c# 643}{ExtrQuery# [5000:1:644:0:0:100000:1] sh# 257 sz# 99743 c# 644}{ExtrQuery# [5000:1:645:0:0:100000:1] sh# 257 sz# 99743 c# 645}{ExtrQuery# [5000:1:646:0:0:100000:1] sh# 257 sz# 99743 c# 646}{ExtrQuery# [5000:1:647:0:0:100000:1] sh# 257 sz# 99743 c# 647}{ExtrQuery# [5000:1:648:0:0:100000:1] sh# 257 sz# 99743 c# 648}{ExtrQuery# [5000:1:649:0:0:100000:1] sh# 257 sz# 99743 c# 649}{ExtrQuery# [5000:1:650:0:0:100000:1] sh# 257 sz# 99743 c# 650}{ExtrQuery# [5000:1:651:0:0:100000:1] sh# 257 sz# 99743 c# 651}{ExtrQuery# [5000:1:652:0:0:100000:1] sh# 257 sz# 99743 c# 652}{ExtrQuery# [5000:1:653:0:0:100000:1] 
sh# 257 sz# 99743 c# 653}{ExtrQuery# [5000:1:654:0:0:100000:1] sh# 257 sz# 99743 c# 654}{ExtrQuery# [5000:1:655:0:0:100000:1] sh# 257 sz# 99743 c# 655}{ExtrQuery# [5000:1:656:0:0:100000:1] sh# 257 sz# 99743 c# 656}{ExtrQuery# [5000:1:657:0:0:100000:1] sh# 257 sz# 99743 c# 657}{ExtrQuery# [5000:1:658:0:0:100000:1] sh# 257 sz# 99743 c# 658}{ExtrQuery# [5000:1:659:0:0:100000:1] sh# 257 sz# 99743 c# 659}{ExtrQuery# [5000:1:660:0:0:100000:1] sh# 257 sz# 99743 c# 660}{ExtrQuery# [5000:1:661:0:0:100000:1] sh# 257 sz# 99743 c# 661}{ExtrQuery# [5000:1:662:0:0:100000:1] sh# 257 sz# 99743 c# 662}{ExtrQuery# [5000:1:663:0:0:100000:1] sh# 257 sz# 99743 c# 663}{ExtrQuery# [5000:1:664:0:0:100000:1] sh# 257 sz# 99743 c# 664}{ExtrQuery# [5000:1:665:0:0:100000:1] sh# 257 sz# 99743 c# 665}{ExtrQuery# [5000:1:666:0:0:100000:1] sh# 257 sz# 99743 c# 666}{ExtrQuery# [5000:1:667:0:0:100000:1] sh# 257 sz# 99743 c# 667}{ExtrQuery# [5000:1:668:0:0:100000:1] sh# 257 sz# 99743 c# 668}{ExtrQuery# [5000:1:669:0:0:100000:1] sh# 257 sz# 99743 c# 669}{ExtrQuery# [5000:1:670:0:0:100000:1] sh# 257 sz# 99743 c# 670}{ExtrQuery# [5000:1:671:0:0:100000:1] sh# 257 sz# 99743 c# 671}{ExtrQuery# [5000:1:672:0:0:17027:1] sh# 257 sz# 16770 c# 672} {MsgQoS} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0}; VDISK CAN NOT REPLY ON TEvVGet REQUEST >> ClosedIntervalSet::Difference [GOOD] >> ClosedIntervalSet::Contains >> ClosedIntervalSet::Contains [GOOD] >> ClosedIntervalSet::EnumInRange >> BsControllerTest::TestLocalSelfHeal [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] >> TBsVDiskBadBlobId::PutBlobWithBadId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2026-02-19T08:04:25.531136Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-02-19T08:04:25.531196Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-02-19T08:04:25.531299Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-02-19T08:04:25.531333Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-02-19T08:04:25.531374Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-02-19T08:04:25.531400Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-02-19T08:04:25.531440Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-02-19T08:04:25.531461Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-02-19T08:04:25.531499Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-02-19T08:04:25.531520Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-02-19T08:04:25.531561Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-02-19T08:04:25.531583Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-02-19T08:04:25.531621Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-02-19T08:04:25.531654Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-02-19T08:04:25.531715Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-02-19T08:04:25.531737Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-02-19T08:04:25.531788Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-02-19T08:04:25.531812Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-02-19T08:04:25.531863Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-02-19T08:04:25.531886Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-02-19T08:04:25.531923Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-02-19T08:04:25.531946Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-02-19T08:04:25.531986Z 12 00h00m00.000000s 
:BS_NODE DEBUG: [12] Bootstrap 2026-02-19T08:04:25.532007Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-02-19T08:04:25.532046Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-02-19T08:04:25.532067Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-02-19T08:04:25.532101Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-02-19T08:04:25.532124Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-02-19T08:04:25.532162Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-02-19T08:04:25.532185Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-02-19T08:04:25.532244Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-02-19T08:04:25.532266Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-02-19T08:04:25.532301Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-02-19T08:04:25.532321Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-02-19T08:04:25.532369Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-02-19T08:04:25.532406Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-02-19T08:04:25.532466Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-02-19T08:04:25.532488Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-02-19T08:04:25.532524Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-02-19T08:04:25.532549Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-02-19T08:04:25.532584Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-02-19T08:04:25.532643Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-02-19T08:04:25.532697Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-02-19T08:04:25.532725Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-02-19T08:04:25.532764Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-02-19T08:04:25.532785Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-02-19T08:04:25.532834Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-02-19T08:04:25.532856Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-02-19T08:04:25.532891Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-02-19T08:04:25.532914Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-02-19T08:04:25.532968Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-02-19T08:04:25.532992Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-02-19T08:04:25.533027Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-02-19T08:04:25.533054Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-02-19T08:04:25.533096Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-02-19T08:04:25.533118Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-02-19T08:04:25.533176Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-02-19T08:04:25.533199Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-02-19T08:04:25.533235Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-02-19T08:04:25.533258Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-02-19T08:04:25.533296Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-02-19T08:04:25.533319Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-02-19T08:04:25.533383Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-02-19T08:04:25.533406Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-02-19T08:04:25.533442Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-02-19T08:04:25.533465Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2026-02-19T08:04:25.533517Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-02-19T08:04:25.533542Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 
2026-02-19T08:04:25.533582Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-02-19T08:04:25.533610Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-02-19T08:04:25.533648Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-02-19T08:04:25.533670Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-02-19T08:04:25.559391Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-02-19T08:04:25.561156Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-02-19T08:04:25.561244Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2026-02-19T08:04:25.561285Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-02-19T08:04:25.561323Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-02-19T08:04:25.561368Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-02-19T08:04:25.561407Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2026-02-19T08:04:25.561447Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-02-19T08:04:25.561485Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2026-02-19T08:04:25.561527Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-02-19T08:04:25.561563Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-02-19T08:04:25.561600Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-02-19T08:04:25.561637Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-02-19T08:04:25.561741Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-02-19T08:04:25.561796Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-02-19T08:04:25.561837Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-02-19T08:04:25.561874Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2026-02-19T08:04:25.561910Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR 
ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-02-19T08:04:25.561963Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-02-19T08:04:25.562002Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-02-19T08:04:25.562038Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-02-19T08:04:25.562079Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2026-02-19T08:04:25.562131Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-02-19T08:04:25.562172Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-02-19T08:04:25.562210Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-02-19T08:04:25.562246Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-02-19T08:04:25.562295Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-02-19T08:04:25.562341Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2026-02-19T08:04:25.562382Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-02-19T08:04:25.562444Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-02-19T08:04:25.562500Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-02-19T08:04:25.562547Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-02-19T08:04:25.562590Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-02-19T08:04:25.562629Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-02-19T08:04:25.562683Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
icated# true 2026-02-19T08:04:27.637128Z 1 00h05m00.104096s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483662 VDiskId# [8000000e:1:2:0:0] DiskIsOk# true 2026-02-19T08:04:27.637159Z 1 00h05m00.104096s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483662 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.637182Z 1 00h05m00.104096s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483662 VDiskId# [8000000e:1:2:2:0] DiskIsOk# true 2026-02-19T08:04:27.642163Z 1 00h05m00.104608s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483662 Items# [8000000e:1:2:1:0]: 36:1002:1000 -> 36:1000:1010 ConfigTxSeqNo# 48 2026-02-19T08:04:27.642208Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483662 Success# true 2026-02-19T08:04:27.642373Z 36 00h05m00.104608s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.642420Z 36 00h05m00.104608s :BS_NODE DEBUG: [36] VDiskId# [8000000e:2:2:1:0] PDiskId# 1000 VSlotId# 1010 created 2026-02-19T08:04:27.642487Z 36 00h05m00.104608s :BS_NODE DEBUG: [36] VDiskId# [8000000e:2:2:1:0] status changed to INIT_PENDING 2026-02-19T08:04:27.642603Z 3 00h05m00.104608s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-02-19T08:04:27.642652Z 3 00h05m00.104608s :BS_NODE DEBUG: [3] VDiskId# [8000000e:1:0:2:0] -> [8000000e:2:0:2:0] 2026-02-19T08:04:27.642739Z 21 00h05m00.104608s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-02-19T08:04:27.642784Z 21 00h05m00.104608s :BS_NODE DEBUG: [21] VDiskId# [8000000e:1:1:0:0] -> [8000000e:2:1:0:0] 2026-02-19T08:04:27.642871Z 24 00h05m00.104608s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2026-02-19T08:04:27.642920Z 24 00h05m00.104608s :BS_NODE DEBUG: [24] VDiskId# [8000000e:1:1:1:0] -> [8000000e:2:1:1:0] 2026-02-19T08:04:27.642990Z 9 00h05m00.104608s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-02-19T08:04:27.643027Z 9 00h05m00.104608s :BS_NODE DEBUG: [9] VDiskId# [8000000e:1:0:0:0] -> [8000000e:2:0:0:0] 2026-02-19T08:04:27.643110Z 27 00h05m00.104608s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2026-02-19T08:04:27.643155Z 27 00h05m00.104608s :BS_NODE DEBUG: [27] VDiskId# [8000000e:1:2:2:0] -> [8000000e:2:2:2:0] 2026-02-19T08:04:27.643225Z 12 00h05m00.104608s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-02-19T08:04:27.643271Z 12 00h05m00.104608s :BS_NODE DEBUG: [12] VDiskId# [8000000e:1:0:1:0] -> [8000000e:2:0:1:0] 2026-02-19T08:04:27.643352Z 15 00h05m00.104608s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-02-19T08:04:27.643396Z 15 00h05m00.104608s :BS_NODE DEBUG: [15] VDiskId# [8000000e:1:1:2:0] -> [8000000e:2:1:2:0] 2026-02-19T08:04:27.643480Z 33 00h05m00.104608s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-02-19T08:04:27.643530Z 33 00h05m00.104608s :BS_NODE DEBUG: [33] VDiskId# [8000000e:1:2:0:0] -> [8000000e:2:2:0:0] 2026-02-19T08:04:27.643817Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483774 2026-02-19T08:04:27.644887Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483774 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.644937Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483774 VDiskId# [8000007e:1:0:0:0] DiskIsOk# true 2026-02-19T08:04:27.644971Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} 
Reassigner TEvVStatusResult GroupId# 2147483774 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.645000Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483774 VDiskId# [8000007e:1:0:1:0] DiskIsOk# true 2026-02-19T08:04:27.645038Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483774 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.645072Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483774 VDiskId# [8000007e:1:0:2:0] DiskIsOk# true 2026-02-19T08:04:27.645113Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483774 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.645146Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483774 VDiskId# [8000007e:1:1:0:0] DiskIsOk# true 2026-02-19T08:04:27.645183Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483774 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.645208Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483774 VDiskId# [8000007e:1:1:1:0] DiskIsOk# true 2026-02-19T08:04:27.645232Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483774 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.645258Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483774 VDiskId# [8000007e:1:1:2:0] DiskIsOk# true 2026-02-19T08:04:27.645298Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483774 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.645322Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483774 VDiskId# [8000007e:1:2:0:0] DiskIsOk# true 2026-02-19T08:04:27.645356Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483774 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:27.645396Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483774 VDiskId# [8000007e:1:2:2:0] DiskIsOk# true 2026-02-19T08:04:27.650512Z 1 00h05m00.105120s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483774 Items# [8000007e:1:2:1:0]: 36:1002:1007 -> 36:1001:1010 ConfigTxSeqNo# 49 2026-02-19T08:04:27.650563Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483774 Success# true 2026-02-19T08:04:27.650722Z 36 00h05m00.105120s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.650766Z 36 00h05m00.105120s :BS_NODE DEBUG: [36] VDiskId# [8000007e:2:2:1:0] PDiskId# 1001 VSlotId# 1010 created 2026-02-19T08:04:27.650832Z 36 00h05m00.105120s :BS_NODE DEBUG: [36] VDiskId# [8000007e:2:2:1:0] status changed to INIT_PENDING 2026-02-19T08:04:27.650949Z 3 00h05m00.105120s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-02-19T08:04:27.651006Z 3 00h05m00.105120s :BS_NODE DEBUG: [3] VDiskId# [8000007e:1:0:2:0] -> [8000007e:2:0:2:0] 2026-02-19T08:04:27.651088Z 21 00h05m00.105120s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-02-19T08:04:27.651140Z 
21 00h05m00.105120s :BS_NODE DEBUG: [21] VDiskId# [8000007e:1:1:0:0] -> [8000007e:2:1:0:0] 2026-02-19T08:04:27.651238Z 24 00h05m00.105120s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2026-02-19T08:04:27.651281Z 24 00h05m00.105120s :BS_NODE DEBUG: [24] VDiskId# [8000007e:1:1:1:0] -> [8000007e:2:1:1:0] 2026-02-19T08:04:27.651365Z 9 00h05m00.105120s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2026-02-19T08:04:27.651407Z 9 00h05m00.105120s :BS_NODE DEBUG: [9] VDiskId# [8000007e:1:0:0:0] -> [8000007e:2:0:0:0] 2026-02-19T08:04:27.651503Z 27 00h05m00.105120s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2026-02-19T08:04:27.651546Z 27 00h05m00.105120s :BS_NODE DEBUG: [27] VDiskId# [8000007e:1:2:2:0] -> [8000007e:2:2:2:0] 2026-02-19T08:04:27.651622Z 12 00h05m00.105120s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-02-19T08:04:27.651674Z 12 00h05m00.105120s :BS_NODE DEBUG: [12] VDiskId# [8000007e:1:0:1:0] -> [8000007e:2:0:1:0] 2026-02-19T08:04:27.651747Z 15 00h05m00.105120s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2026-02-19T08:04:27.651788Z 15 00h05m00.105120s :BS_NODE DEBUG: [15] VDiskId# [8000007e:1:1:2:0] -> [8000007e:2:1:2:0] 2026-02-19T08:04:27.651871Z 33 00h05m00.105120s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2026-02-19T08:04:27.651912Z 33 00h05m00.105120s :BS_NODE DEBUG: [33] VDiskId# [8000007e:1:2:0:0] -> [8000007e:2:2:0:0] 2026-02-19T08:04:27.652894Z 36 00h05m02.150584s :BS_NODE DEBUG: [36] VDiskId# [8000002e:2:2:1:0] status changed to REPLICATING 2026-02-19T08:04:27.653558Z 36 00h05m02.522560s :BS_NODE DEBUG: [36] VDiskId# [8000004e:2:2:1:0] status changed to REPLICATING 2026-02-19T08:04:27.654179Z 36 00h05m03.567120s :BS_NODE DEBUG: [36] VDiskId# [8000007e:2:2:1:0] status changed to REPLICATING 2026-02-19T08:04:27.654761Z 36 00h05m04.195608s :BS_NODE DEBUG: [36] VDiskId# [8000000e:2:2:1:0] status changed to REPLICATING 2026-02-19T08:04:27.655414Z 36 00h05m04.634072s :BS_NODE DEBUG: [36] VDiskId# [8000003e:2:2:1:0] status changed to REPLICATING 2026-02-19T08:04:27.656004Z 36 00h05m04.708096s :BS_NODE DEBUG: [36] VDiskId# [8000001e:2:2:1:0] status changed to REPLICATING 2026-02-19T08:04:27.656607Z 36 00h05m04.815048s :BS_NODE DEBUG: [36] VDiskId# [8000005e:2:2:1:0] status changed to REPLICATING 2026-02-19T08:04:27.658153Z 36 00h05m05.797536s :BS_NODE DEBUG: [36] VDiskId# [8000006e:2:2:1:0] status changed to REPLICATING 2026-02-19T08:04:27.659129Z 36 00h05m10.807096s :BS_NODE DEBUG: [36] VDiskId# [8000001e:2:2:1:0] status changed to READY 2026-02-19T08:04:27.660619Z 36 00h05m10.807608s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.660678Z 36 00h05m10.807608s :BS_NODE DEBUG: [36] VDiskId# [8000001e:1:2:1:0] destroyed 2026-02-19T08:04:27.660850Z 36 00h05m12.222560s :BS_NODE DEBUG: [36] VDiskId# [8000004e:2:2:1:0] status changed to READY 2026-02-19T08:04:27.662157Z 36 00h05m12.223072s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.662204Z 36 00h05m12.223072s :BS_NODE DEBUG: [36] VDiskId# [8000004e:1:2:1:0] destroyed 2026-02-19T08:04:27.662339Z 36 00h05m13.379536s :BS_NODE DEBUG: [36] VDiskId# [8000006e:2:2:1:0] status changed to READY 2026-02-19T08:04:27.663625Z 36 00h05m13.380048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.663668Z 36 00h05m13.380048s :BS_NODE DEBUG: [36] VDiskId# [8000006e:1:2:1:0] destroyed 2026-02-19T08:04:27.663826Z 36 00h05m13.789584s :BS_NODE DEBUG: [36] VDiskId# [8000002e:2:2:1:0] status changed to READY 2026-02-19T08:04:27.665095Z 36 00h05m13.790096s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.665144Z 36 
00h05m13.790096s :BS_NODE DEBUG: [36] VDiskId# [8000002e:1:2:1:0] destroyed 2026-02-19T08:04:27.666169Z 36 00h05m21.158048s :BS_NODE DEBUG: [36] VDiskId# [8000005e:2:2:1:0] status changed to READY 2026-02-19T08:04:27.667588Z 36 00h05m21.158560s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.667633Z 36 00h05m21.158560s :BS_NODE DEBUG: [36] VDiskId# [8000005e:1:2:1:0] destroyed 2026-02-19T08:04:27.667788Z 36 00h05m22.926120s :BS_NODE DEBUG: [36] VDiskId# [8000007e:2:2:1:0] status changed to READY 2026-02-19T08:04:27.669257Z 36 00h05m22.926632s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.669308Z 36 00h05m22.926632s :BS_NODE DEBUG: [36] VDiskId# [8000007e:1:2:1:0] destroyed 2026-02-19T08:04:27.671234Z 36 00h05m34.655072s :BS_NODE DEBUG: [36] VDiskId# [8000003e:2:2:1:0] status changed to READY 2026-02-19T08:04:27.672672Z 36 00h05m34.655584s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.672718Z 36 00h05m34.655584s :BS_NODE DEBUG: [36] VDiskId# [8000003e:1:2:1:0] destroyed 2026-02-19T08:04:27.673056Z 36 00h05m35.146608s :BS_NODE DEBUG: [36] VDiskId# [8000000e:2:2:1:0] status changed to READY 2026-02-19T08:04:27.674639Z 36 00h05m35.147120s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2026-02-19T08:04:27.674682Z 36 00h05m35.147120s :BS_NODE DEBUG: [36] VDiskId# [8000000e:1:2:1:0] destroyed >> TBsVDiskBadBlobId::PutBlobWithBadId [GOOD] >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump] >> ClosedIntervalSet::EnumInRange [GOOD] >> ClosedIntervalSet::EnumInRangeReverse >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2026-02-19T08:04:25.545451Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-02-19T08:04:25.545509Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-02-19T08:04:25.545586Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-02-19T08:04:25.545608Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-02-19T08:04:25.545646Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-02-19T08:04:25.545670Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-02-19T08:04:25.545722Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-02-19T08:04:25.545743Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-02-19T08:04:25.545794Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-02-19T08:04:25.545819Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-02-19T08:04:25.545861Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-02-19T08:04:25.545882Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-02-19T08:04:25.545916Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-02-19T08:04:25.545959Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-02-19T08:04:25.546001Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-02-19T08:04:25.546021Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-02-19T08:04:25.546073Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-02-19T08:04:25.546096Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-02-19T08:04:25.546147Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-02-19T08:04:25.546171Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-02-19T08:04:25.546204Z 11 00h00m00.000000s :BS_NODE 
DEBUG: [11] Bootstrap 2026-02-19T08:04:25.546229Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-02-19T08:04:25.546261Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-02-19T08:04:25.546282Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-02-19T08:04:25.546317Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-02-19T08:04:25.546338Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-02-19T08:04:25.546383Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-02-19T08:04:25.546403Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-02-19T08:04:25.546442Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-02-19T08:04:25.546468Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-02-19T08:04:25.546515Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-02-19T08:04:25.546536Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-02-19T08:04:25.546569Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-02-19T08:04:25.546588Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-02-19T08:04:25.546629Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-02-19T08:04:25.546653Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-02-19T08:04:25.546692Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-02-19T08:04:25.546714Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-02-19T08:04:25.546747Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-02-19T08:04:25.546772Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-02-19T08:04:25.546826Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-02-19T08:04:25.546847Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-02-19T08:04:25.546899Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-02-19T08:04:25.546927Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-02-19T08:04:25.546960Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-02-19T08:04:25.546986Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-02-19T08:04:25.547025Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-02-19T08:04:25.547044Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-02-19T08:04:25.547077Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-02-19T08:04:25.547098Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-02-19T08:04:25.547153Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-02-19T08:04:25.547177Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-02-19T08:04:25.547209Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-02-19T08:04:25.547235Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-02-19T08:04:25.547270Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-02-19T08:04:25.547291Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-02-19T08:04:25.547343Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-02-19T08:04:25.547365Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-02-19T08:04:25.547399Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-02-19T08:04:25.547423Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-02-19T08:04:25.547464Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-02-19T08:04:25.547493Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-02-19T08:04:25.547543Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-02-19T08:04:25.547563Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-02-19T08:04:25.547595Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-02-19T08:04:25.547615Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 
2026-02-19T08:04:25.547657Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-02-19T08:04:25.547678Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2026-02-19T08:04:25.547716Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-02-19T08:04:25.547739Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-02-19T08:04:25.547772Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-02-19T08:04:25.547794Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-02-19T08:04:25.572483Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-02-19T08:04:25.574218Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-02-19T08:04:25.574288Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2026-02-19T08:04:25.574336Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-02-19T08:04:25.574374Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-02-19T08:04:25.574419Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-02-19T08:04:25.574459Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2026-02-19T08:04:25.574499Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-02-19T08:04:25.574540Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2026-02-19T08:04:25.574585Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-02-19T08:04:25.574626Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-02-19T08:04:25.575353Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-02-19T08:04:25.575439Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-02-19T08:04:25.575514Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-02-19T08:04:25.575560Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-02-19T08:04:25.575598Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-02-19T08:04:25.575630Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# 
[0:0:0] PipeClient# [17:2735:41] 2026-02-19T08:04:25.575665Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-02-19T08:04:25.575710Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-02-19T08:04:25.575747Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-02-19T08:04:25.575781Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-02-19T08:04:25.575824Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2026-02-19T08:04:25.575875Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-02-19T08:04:25.575919Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-02-19T08:04:25.575960Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-02-19T08:04:25.576009Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-02-19T08:04:25.576059Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-02-19T08:04:25.576109Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2026-02-19T08:04:25.576152Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-02-19T08:04:25.576215Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-02-19T08:04:25.576264Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-02-19T08:04:25.576311Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-02-19T08:04:25.576352Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-02-19T08:04:25.576391Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-02-19T08:04:25.576468Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2026-02-19T08:04:29.877679Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2026-02-19T08:04:29.877756Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2026-02-19T08:04:29.877803Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2026-02-19T08:04:29.877854Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2026-02-19T08:04:29.878524Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-02-19T08:04:29.878583Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2026-02-19T08:04:29.878643Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2026-02-19T08:04:29.878690Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2026-02-19T08:04:29.878736Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2026-02-19T08:04:29.878783Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2026-02-19T08:04:29.878824Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2026-02-19T08:04:29.878885Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2026-02-19T08:04:29.878933Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2026-02-19T08:04:29.878978Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2026-02-19T08:04:29.879025Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2026-02-19T08:04:29.879087Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2026-02-19T08:04:29.879134Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2026-02-19T08:04:29.879179Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2026-02-19T08:04:29.879221Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2026-02-19T08:04:29.879265Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2026-02-19T08:04:29.879306Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2026-02-19T08:04:29.880036Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2026-02-19T08:04:29.880097Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2026-02-19T08:04:29.880142Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2026-02-19T08:04:29.880185Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2026-02-19T08:04:29.880246Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2026-02-19T08:04:29.880303Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2026-02-19T08:04:29.880348Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2026-02-19T08:04:29.880392Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2026-02-19T08:04:29.880439Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] 
VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2026-02-19T08:04:29.880499Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2026-02-19T08:04:29.880542Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2026-02-19T08:04:29.880593Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2026-02-19T08:04:29.881067Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2026-02-19T08:04:29.881129Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2026-02-19T08:04:29.881172Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2026-02-19T08:04:29.881216Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2026-02-19T08:04:29.881268Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2026-02-19T08:04:29.881332Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2026-02-19T08:04:29.881377Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2026-02-19T08:04:29.881429Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2026-02-19T08:04:29.881471Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2026-02-19T08:04:29.884392Z 5 01h25m01.276560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2026-02-19T08:04:29.884896Z 4 01h25m01.717560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2026-02-19T08:04:29.885307Z 8 01h25m01.997560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2026-02-19T08:04:29.885752Z 10 01h25m02.369560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2026-02-19T08:04:29.886235Z 5 01h25m03.707560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2026-02-19T08:04:29.886647Z 10 01h25m03.814560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2026-02-19T08:04:29.886998Z 4 01h25m03.998560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2026-02-19T08:04:29.887420Z 2 01h25m04.039560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2026-02-19T08:04:29.887811Z 2 01h25m04.058560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2026-02-19T08:04:29.888247Z 7 01h25m04.225560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2026-02-19T08:04:29.888655Z 7 01h25m04.245560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2026-02-19T08:04:29.889053Z 4 01h25m04.385560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2026-02-19T08:04:29.889469Z 4 01h25m04.436560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2026-02-19T08:04:29.889884Z 7 01h25m04.556560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2026-02-19T08:04:29.890273Z 10 01h25m04.623560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2026-02-19T08:04:29.892402Z 7 01h25m05.471560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2026-02-19T08:04:29.892968Z 4 01h25m07.615560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] 
status changed to READY 2026-02-19T08:04:29.894055Z 1 01h25m07.616072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.894135Z 1 01h25m07.616072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2026-02-19T08:04:29.894675Z 10 01h25m11.495560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2026-02-19T08:04:29.895608Z 1 01h25m11.496072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.895665Z 1 01h25m11.496072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2026-02-19T08:04:29.895786Z 8 01h25m11.513560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2026-02-19T08:04:29.896643Z 1 01h25m11.514072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.896697Z 1 01h25m11.514072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2026-02-19T08:04:29.896802Z 5 01h25m11.873560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2026-02-19T08:04:29.897925Z 1 01h25m11.874072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.897976Z 1 01h25m11.874072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2026-02-19T08:04:29.898109Z 7 01h25m12.556560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2026-02-19T08:04:29.898909Z 1 01h25m12.557072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.898958Z 1 01h25m12.557072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2026-02-19T08:04:29.900002Z 7 01h25m16.330560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2026-02-19T08:04:29.900960Z 1 01h25m16.331072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.901010Z 1 01h25m16.331072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2026-02-19T08:04:29.901136Z 10 01h25m17.356560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2026-02-19T08:04:29.901979Z 1 01h25m17.357072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.902031Z 1 01h25m17.357072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2026-02-19T08:04:29.902173Z 4 01h25m19.822560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2026-02-19T08:04:29.903020Z 1 01h25m19.823072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.903076Z 1 01h25m19.823072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2026-02-19T08:04:29.903575Z 10 01h25m20.326560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 2026-02-19T08:04:29.904413Z 1 01h25m20.327072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.904490Z 1 01h25m20.327072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2026-02-19T08:04:29.904609Z 7 01h25m21.852560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2026-02-19T08:04:29.905376Z 1 01h25m21.853072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.905427Z 1 01h25m21.853072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2026-02-19T08:04:29.905875Z 5 01h25m24.737560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2026-02-19T08:04:29.906659Z 1 01h25m24.738072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.906708Z 1 01h25m24.738072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2026-02-19T08:04:29.907184Z 4 01h25m25.272560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2026-02-19T08:04:29.907950Z 1 01h25m25.273072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 
2026-02-19T08:04:29.907999Z 1 01h25m25.273072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2026-02-19T08:04:29.909490Z 2 01h25m28.693560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2026-02-19T08:04:29.910300Z 1 01h25m28.694072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.910353Z 1 01h25m28.694072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2026-02-19T08:04:29.911926Z 7 01h25m32.488560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2026-02-19T08:04:29.912773Z 1 01h25m32.489072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.912820Z 1 01h25m32.489072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2026-02-19T08:04:29.913814Z 2 01h25m36.232560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2026-02-19T08:04:29.914689Z 1 01h25m36.233072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.914743Z 1 01h25m36.233072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2026-02-19T08:04:29.915136Z 4 01h25m38.487560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2026-02-19T08:04:29.915851Z 1 01h25m38.488072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:29.915924Z 1 01h25m38.488072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath [GOOD] Test command err: 2026-02-19T08:04:29.841144Z :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:572: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVPut: TabletID cannot be empty; id# [0:1:10:0:0:10:1] Marker# BSVS43 2026-02-19T08:04:30.752166Z :BS_VDISK_OTHER ERROR: vdisk_context.h:143: PDiskId# 1 VDISK[0:_:0:0:0]: (0) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2026-02-19T08:04:30.752338Z :BS_SKELETON ERROR: blobstorage_skeletonfront.cpp:1755: PDiskId# 1 VDISK[0:_:0:0:0]: (0) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? 
Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 >> TIncrHugeBasicTest::Defrag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Defrag [GOOD] Test command err: 2026-02-19T08:03:36.734182Z :BS_INCRHUGE DEBUG: incrhuge_keeper.cpp:72: BlockSize# 8128 BlocksInChunk# 2304 BlocksInMinBlob# 65 MaxBlobsPerChunk# 35 BlocksInDataSection# 2303 BlocksInIndexSection# 1 2026-02-19T08:03:36.734306Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:152: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] starting ReadLog 2026-02-19T08:03:36.737087Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:161: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] finished ReadLog 2026-02-19T08:03:36.737125Z :BS_INCRHUGE DEBUG: incrhuge_keeper_recovery.cpp:200: [PDisk# 000000001 Recovery] ApplyReadLog Chunks# [] Deletes# [] Owners# {} CurrentSerNum# 0 NextLsn# 1 2026-02-19T08:03:36.737163Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:515: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] ready 2026-02-19T08:03:36.737201Z :TEST DEBUG: test_actor_concurrent.h:153: finished Init Reference# [] Enumerated# [] InFlightDeletes# [] 2026-02-19T08:03:36.737212Z :TEST DEBUG: test_actor_concurrent.h:209: ActionsTaken# 1 2026-02-19T08:03:36.737225Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 0 InFlightWritesSize# 0 2026-02-19T08:03:36.739121Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 NumReq# 0 2026-02-19T08:03:36.740858Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 1 InFlightWritesSize# 1 2026-02-19T08:03:36.741881Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 0 HandleWrite Lsn# 0 DataSize# 811717 WriteQueueSize# 1 WriteInProgressItemsSize# 0 2026-02-19T08:03:36.741918Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 0 2026-02-19T08:03:36.741951Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2026-02-19T08:03:36.741968Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:230: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2026-02-19T08:03:36.749112Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1745495:1:0] Lsn# 1 NumReq# 1 2026-02-19T08:03:36.750076Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 1 HandleWrite Lsn# 1 DataSize# 1745495 WriteQueueSize# 2 WriteInProgressItemsSize# 0 2026-02-19T08:03:36.750105Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 0 2026-02-19T08:03:36.750120Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2026-02-19T08:03:36.750141Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:230: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2026-02-19T08:03:36.752984Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 2 InFlightWritesSize# 2 2026-02-19T08:03:36.754677Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:460: [PDisk# 000000001 Logger] ApplyLogChunkItem Lsn# 1 Status# OK 2026-02-19T08:03:36.754727Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 2 ChunkSerNum# 1000 
2026-02-19T08:03:36.754771Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 3 ChunkSerNum# 1001 2026-02-19T08:03:36.754795Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 4 ChunkSerNum# 1002 2026-02-19T08:03:36.754819Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 5 ChunkSerNum# 1003 2026-02-19T08:03:36.754845Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 6 ChunkSerNum# 1004 2026-02-19T08:03:36.754855Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 7 ChunkSerNum# 1005 2026-02-19T08:03:36.754865Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 8 ChunkSerNum# 1006 2026-02-19T08:03:36.754876Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 9 ChunkSerNum# 1007 2026-02-19T08:03:36.754893Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 0 2026-02-19T08:03:36.754925Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2026-02-19T08:03:36.755620Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem OffsetInBlocks# 0 IndexInsideChunk# 0 SizeInBlocks# 100 SizeInBytes# 812800 Offset# 0 Size# 812800 End# 812800 Id# 0000000000000000 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2026-02-19T08:03:36.755635Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem entry 2026-02-19T08:03:36.755995Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem OffsetInBlocks# 100 IndexInsideChunk# 1 SizeInBlocks# 215 SizeInBytes# 1747520 Offset# 812800 Size# 1747520 End# 2560320 Id# 0000000000000001 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2026-02-19T08:03:36.758209Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:602037:2:0] Lsn# 2 NumReq# 2 2026-02-19T08:03:36.759339Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 2 HandleWrite Lsn# 2 DataSize# 602037 WriteQueueSize# 1 WriteInProgressItemsSize# 2 2026-02-19T08:03:36.759365Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 2 2026-02-19T08:03:36.759398Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem entry 2026-02-19T08:03:36.759588Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem OffsetInBlocks# 315 IndexInsideChunk# 2 SizeInBlocks# 75 SizeInBytes# 609600 Offset# 2560320 Size# 609600 End# 3169920 Id# 0000000000000002 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2026-02-19T08:03:36.759999Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 3 InFlightWritesSize# 3 2026-02-19T08:03:36.761861Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1287465:3:0] Lsn# 3 NumReq# 3 2026-02-19T08:03:36.764386Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 4 InFlightWritesSize# 4 2026-02-19T08:03:36.764783Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 3 HandleWrite Lsn# 3 DataSize# 1287465 WriteQueueSize# 1 WriteInProgressItemsSize# 3 2026-02-19T08:03:36.764799Z 
:BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 3 2026-02-19T08:03:36.764813Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem entry 2026-02-19T08:03:36.765094Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem OffsetInBlocks# 390 IndexInsideChunk# 3 SizeInBlocks# 159 SizeInBytes# 1292352 Offset# 3169920 Size# 1292352 End# 4462272 Id# 0000000000000003 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2026-02-19T08:03:36.765139Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 0 ApplyBlobWrite Status# OK 2026-02-19T08:03:36.765299Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 3 2026-02-19T08:03:36.765328Z :BS_INCRHUGE DEBUG: incrhuge_keeper_defrag.cpp:46: [PDisk# 000000001 Defragmenter] overall efficiency 0.030 2026-02-19T08:03:36.768694Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1501676:4:0] Lsn# 4 NumReq# 4 2026-02-19T08:03:36.769337Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 4 HandleWrite Lsn# 4 DataSize# 1501676 WriteQueueSize# 1 WriteInProgressItemsSize# 3 2026-02-19T08:03:36.769355Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 3 2026-02-19T08:03:36.769368Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem entry 2026-02-19T08:03:36.769749Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem OffsetInBlocks# 549 IndexInsideChunk# 4 SizeInBlocks# 185 SizeInBytes# 1503680 Offset# 4462272 Size# 1503680 End# 5965952 Id# 0000000000000004 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2026-02-19T08:03:36.771462Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 5 InFlightWritesSize# 5 2026-02-19T08:03:36.772574Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:687721:5:0] Lsn# 5 NumReq# 5 2026-02-19T08:03:36.773780Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 5 HandleWrite Lsn# 5 DataSize# 687721 WriteQueueSize# 1 WriteInProgressItemsSize# 4 2026-02-19T08:03:36.773820Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 4 2026-02-19T08:03:36.773836Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem entry 2026-02-19T08:03:36.774070Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem OffsetInBlocks# 734 IndexInsideChunk# 5 SizeInBlocks# 85 SizeInBytes# 690880 Offset# 5965952 Size# 690880 End# 6656832 Id# 0000000000000005 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2026-02-19T08:03:36.781945Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 6 InFlightWritesSize# 6 2026-02-19T08:03:36.785026Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1957662:6:0] Lsn# 6 NumReq# 6 2026-02-19T08:03:36.785816Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 6 HandleWrite Lsn# 6 DataSize# 1957662 WriteQueueSize# 1 WriteInProgressItemsSize# 5 2026-02-19T08:03:36.785846Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 
Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 5 2026-02-19T08:03:36.792753Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2026-02-19T08:03:36.798004Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1824284:7:0] Lsn# 7 NumReq# 7 2026-02-19T08:03:36.801387Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 1 ApplyBlobWrite Status# OK 2026-02-19T08:03:36.801673Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 4 2026-02-19T08:03:36.801698Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 6 ProcessWriteItem entry 2026-02-19T08:03:36.801837Z :TEST DEBUG: test_actor_concurrent.h:308: finished Write Id# 0000000000000000 LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 2026-02-19T08:03:36.801893Z :TEST INFO: test_actor_concurrent.h:320: BytesWritten# 0 MB ElapsedTime# 0.116251s Speed# 0.00 MB/s 2026-02-19T08:03:36.801910Z :TEST DEBUG: test_actor_concurrent.h:209: ActionsTaken# 2 2026-02-19T08:03:36.801927Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2026-02-19T08:03:36.801952Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000000 NumReq# 7 2026-02-19T08:03:36.801981Z :TEST DEBUG: test_actor_concurrent.h:308: finished Write Id# 0000000000000001 LogoBlobId# [1:1:1:0:1745495:1:0] Lsn# 1 2026-02-19T08:03:36.801998Z :TEST INFO: test_actor_concurrent.h:320: BytesWritten# 0 MB ElapsedTime# 0.116385s Speed# 0.00 MB/s 2026-02-19T08:03:36.802005Z :TEST DEBUG: test_actor_concurren ... 00000001 Deleter] finished chunk delete ChunkIdx# 39 Status# OK 2026-02-19T08:04:32.435474Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 560 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.435844Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 7 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.435865Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 565 ProcessWriteItem entry 2026-02-19T08:04:32.436083Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 565 ProcessWriteItem OffsetInBlocks# 1134 IndexInsideChunk# 8 SizeInBlocks# 99 SizeInBytes# 804672 Offset# 9217152 Size# 804672 End# 10021824 Id# 0000000000000015 ChunkIdx# 42 ChunkSerNum# 1269 Defrag# false 2026-02-19T08:04:32.436127Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 572 HandleWrite Lsn# 1192 DataSize# 1815657 WriteQueueSize# 7 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.436144Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 7 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.436163Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 561 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.436571Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 7 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.436592Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 566 ProcessWriteItem entry 2026-02-19T08:04:32.436797Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 566 ProcessWriteItem OffsetInBlocks# 1233 IndexInsideChunk# 9 SizeInBlocks# 115 SizeInBytes# 934720 Offset# 10021824 Size# 934720 End# 10956544 Id# 
000000000000002f ChunkIdx# 42 ChunkSerNum# 1269 Defrag# false 2026-02-19T08:04:32.436824Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 562 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.437221Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 6 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.437233Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 567 ProcessWriteItem entry 2026-02-19T08:04:32.437352Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 567 ProcessWriteItem OffsetInBlocks# 1348 IndexInsideChunk# 10 SizeInBlocks# 79 SizeInBytes# 642112 Offset# 10956544 Size# 642112 End# 11598656 Id# 000000000000002b ChunkIdx# 42 ChunkSerNum# 1269 Defrag# false 2026-02-19T08:04:32.438231Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 45 InFlightWritesSize# 25 2026-02-19T08:04:32.440170Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:1564457:1193:0] Lsn# 1193 NumReq# 45 2026-02-19T08:04:32.440208Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 573 HandleWrite Lsn# 1193 DataSize# 1564457 WriteQueueSize# 6 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.440230Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 6 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.441821Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 563 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.442528Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 6 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.442556Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 568 ProcessWriteItem entry 2026-02-19T08:04:32.442823Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 568 ProcessWriteItem OffsetInBlocks# 1427 IndexInsideChunk# 11 SizeInBlocks# 129 SizeInBytes# 1048512 Offset# 11598656 Size# 1048512 End# 12647168 Id# 000000000000000e ChunkIdx# 42 ChunkSerNum# 1269 Defrag# false 2026-02-19T08:04:32.442865Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 564 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.443040Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 5 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.443061Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 569 ProcessWriteItem entry 2026-02-19T08:04:32.443232Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 569 ProcessWriteItem OffsetInBlocks# 1556 IndexInsideChunk# 12 SizeInBlocks# 88 SizeInBytes# 715264 Offset# 12647168 Size# 715264 End# 13362432 Id# 0000000000000012 ChunkIdx# 42 ChunkSerNum# 1269 Defrag# false 2026-02-19T08:04:32.443885Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 46 InFlightWritesSize# 26 2026-02-19T08:04:32.445392Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:460: [PDisk# 000000001 Logger] ApplyLogChunkItem Lsn# 1186 Status# OK 2026-02-19T08:04:32.445456Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:460: [PDisk# 000000001 Logger] ApplyLogChunkItem Lsn# 1187 Status# OK 2026-02-19T08:04:32.445485Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:474: [PDisk# 000000001 Logger] DeleteChunk ChunkIdx# 40 ChunkSerNum# 1267 2026-02-19T08:04:32.445512Z :BS_INCRHUGE 
DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 1188 Virtual# true 2026-02-19T08:04:32.445535Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:309: [PDisk# 000000001 Deleter] finished chunk delete ChunkIdx# 40 Status# OK 2026-02-19T08:04:32.446100Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:1579253:1194:0] Lsn# 1194 NumReq# 46 2026-02-19T08:04:32.448190Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 565 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.449539Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 47 InFlightWritesSize# 27 2026-02-19T08:04:32.450603Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:584806:1195:0] Lsn# 1195 NumReq# 47 2026-02-19T08:04:32.451809Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 48 InFlightWritesSize# 28 2026-02-19T08:04:32.453453Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:831121:1196:0] Lsn# 1196 NumReq# 48 2026-02-19T08:04:32.453903Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 4 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.453932Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 570 ProcessWriteItem entry 2026-02-19T08:04:32.454383Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 570 ProcessWriteItem OffsetInBlocks# 1644 IndexInsideChunk# 13 SizeInBlocks# 258 SizeInBytes# 2097024 Offset# 13362432 Size# 2097024 End# 15459456 Id# 0000000000000011 ChunkIdx# 42 ChunkSerNum# 1269 Defrag# false 2026-02-19T08:04:32.454443Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 574 HandleWrite Lsn# 1194 DataSize# 1579253 WriteQueueSize# 4 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.454462Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 4 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.454491Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 566 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.454785Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 4 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.454802Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 571 ProcessWriteItem entry 2026-02-19T08:04:32.455176Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 49 InFlightWritesSize# 29 2026-02-19T08:04:32.455212Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 571 ProcessWriteItem OffsetInBlocks# 1902 IndexInsideChunk# 14 SizeInBlocks# 252 SizeInBytes# 2048256 Offset# 15459456 Size# 2048256 End# 17507712 Id# 000000000000001a ChunkIdx# 42 ChunkSerNum# 1269 Defrag# false 2026-02-19T08:04:32.455241Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 567 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.455450Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 3 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.455464Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 572 ProcessWriteItem entry 2026-02-19T08:04:32.455500Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:534: [PDisk# 000000001 Writer] IndexWrite chunkIdx# 42 offset# 17507712 size# 1219200 end# 18726912 
2026-02-19T08:04:32.455840Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 572 ProcessWriteItem OffsetInBlocks# 0 IndexInsideChunk# 0 SizeInBlocks# 224 SizeInBytes# 1820672 Offset# 0 Size# 1820672 End# 1820672 Id# 0000000000000028 ChunkIdx# 43 ChunkSerNum# 1270 Defrag# false 2026-02-19T08:04:32.455875Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 575 HandleWrite Lsn# 1195 DataSize# 584806 WriteQueueSize# 3 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.455890Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 3 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.455906Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 568 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.456005Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:562722:1197:0] Lsn# 1197 NumReq# 49 2026-02-19T08:04:32.456210Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 3 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.456227Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 573 ProcessWriteItem entry 2026-02-19T08:04:32.456555Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 573 ProcessWriteItem OffsetInBlocks# 224 IndexInsideChunk# 1 SizeInBlocks# 193 SizeInBytes# 1568704 Offset# 1820672 Size# 1568704 End# 3389376 Id# 0000000000000020 ChunkIdx# 43 ChunkSerNum# 1270 Defrag# false 2026-02-19T08:04:32.456582Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 569 ApplyBlobWrite Status# OK 2026-02-19T08:04:32.456785Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 4 2026-02-19T08:04:32.456800Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 574 ProcessWriteItem entry 2026-02-19T08:04:32.457093Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 574 ProcessWriteItem OffsetInBlocks# 417 IndexInsideChunk# 2 SizeInBlocks# 195 SizeInBytes# 1584960 Offset# 3389376 Size# 1584960 End# 4974336 Id# 000000000000001e ChunkIdx# 43 ChunkSerNum# 1270 Defrag# false 2026-02-19T08:04:32.457123Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 576 HandleWrite Lsn# 1196 DataSize# 831121 WriteQueueSize# 2 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.457137Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.457156Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 577 HandleWrite Lsn# 1197 DataSize# 562722 WriteQueueSize# 3 WriteInProgressItemsSize# 5 2026-02-19T08:04:32.457168Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 3 WriteInProgressItemsSize# 5 >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large >> ClosedIntervalSet::EnumInRangeReverse [GOOD] >> GivenIdRange::IssueNewRange [GOOD] >> GivenIdRange::Trim |88.6%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks >> GivenIdRange::Trim [GOOD] >> GivenIdRange::Subtract >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> GivenIdRange::Subtract [GOOD] >> GivenIdRange::Points |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> GivenIdRange::Points [GOOD] >> GivenIdRange::Runs [GOOD] >> GivenIdRange::Allocate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] Test command err: None domains 1 new (ns): 200.3048367 None domains 1 old (ns): 105.9038667 None domains 9 new (ns): 126.8824625 None domains 9 old (ns): 63.73997196 4Plus2Block domains 8 new (ns): 93.12803166 4Plus2Block domains 8 old (ns): 43.60918054 4Plus2Block domains 9 new (ns): 76.74454936 4Plus2Block domains 9 old (ns): 46.91574415 ErasureMirror3of4 domains 8 new (ns): 65.84844436 ErasureMirror3of4 domains 8 old (ns): 44.62978539 ErasureMirror3of4 domains 9 new (ns): 102.6377753 ErasureMirror3of4 domains 9 old (ns): 67.11723771 |88.6%| [TA] $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> GivenIdRange::Allocate [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2026-02-19T08:04:25.491285Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-02-19T08:04:25.491344Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-02-19T08:04:25.491423Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-02-19T08:04:25.491448Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-02-19T08:04:25.491522Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-02-19T08:04:25.491554Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-02-19T08:04:25.491599Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-02-19T08:04:25.491622Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2026-02-19T08:04:25.491651Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-02-19T08:04:25.491672Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-02-19T08:04:25.491726Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-02-19T08:04:25.491749Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-02-19T08:04:25.491794Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-02-19T08:04:25.491819Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-02-19T08:04:25.491854Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-02-19T08:04:25.491880Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-02-19T08:04:25.491916Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-02-19T08:04:25.491937Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-02-19T08:04:25.491992Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-02-19T08:04:25.492017Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-02-19T08:04:25.492058Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-02-19T08:04:25.492085Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-02-19T08:04:25.492133Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-02-19T08:04:25.492161Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-02-19T08:04:25.492204Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-02-19T08:04:25.492225Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-02-19T08:04:25.492257Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-02-19T08:04:25.492282Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-02-19T08:04:25.492314Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-02-19T08:04:25.492336Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-02-19T08:04:25.492373Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-02-19T08:04:25.492395Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-02-19T08:04:25.492428Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-02-19T08:04:25.492468Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-02-19T08:04:25.492501Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-02-19T08:04:25.492523Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-02-19T08:04:25.492561Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-02-19T08:04:25.492602Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-02-19T08:04:25.492654Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-02-19T08:04:25.492677Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-02-19T08:04:25.492725Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-02-19T08:04:25.492746Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-02-19T08:04:25.492787Z 22 
00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-02-19T08:04:25.492807Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-02-19T08:04:25.492837Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-02-19T08:04:25.492861Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-02-19T08:04:25.492912Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-02-19T08:04:25.492936Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-02-19T08:04:25.492970Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-02-19T08:04:25.492992Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-02-19T08:04:25.493041Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-02-19T08:04:25.493064Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-02-19T08:04:25.493100Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-02-19T08:04:25.493130Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2026-02-19T08:04:25.493188Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-02-19T08:04:25.493211Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-02-19T08:04:25.493262Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-02-19T08:04:25.493283Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-02-19T08:04:25.493315Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-02-19T08:04:25.493336Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-02-19T08:04:25.493366Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-02-19T08:04:25.493407Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-02-19T08:04:25.493448Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-02-19T08:04:25.493470Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-02-19T08:04:25.515723Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2163:55] Status# ERROR ClientId# [1:2163:55] ServerId# [0:0:0] PipeClient# [1:2163:55] 2026-02-19T08:04:25.517400Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2164:37] Status# ERROR ClientId# [2:2164:37] ServerId# [0:0:0] PipeClient# [2:2164:37] 2026-02-19T08:04:25.517465Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2165:37] Status# ERROR ClientId# [3:2165:37] ServerId# [0:0:0] PipeClient# [3:2165:37] 2026-02-19T08:04:25.517505Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2166:37] Status# ERROR ClientId# [4:2166:37] ServerId# [0:0:0] PipeClient# [4:2166:37] 2026-02-19T08:04:25.517557Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2167:37] Status# ERROR ClientId# [5:2167:37] ServerId# [0:0:0] PipeClient# [5:2167:37] 2026-02-19T08:04:25.517599Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2168:37] Status# ERROR ClientId# [6:2168:37] ServerId# [0:0:0] PipeClient# [6:2168:37] 2026-02-19T08:04:25.517638Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2169:37] Status# ERROR ClientId# [7:2169:37] ServerId# [0:0:0] PipeClient# [7:2169:37] 2026-02-19T08:04:25.517691Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2170:37] Status# ERROR ClientId# [8:2170:37] ServerId# [0:0:0] PipeClient# [8:2170:37] 2026-02-19T08:04:25.517761Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2171:37] Status# ERROR ClientId# [9:2171:37] ServerId# [0:0:0] PipeClient# [9:2171:37] 2026-02-19T08:04:25.517810Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2172:37] Status# ERROR ClientId# [10:2172:37] ServerId# [0:0:0] PipeClient# [10:2172:37] 2026-02-19T08:04:25.517850Z 11 00h00m00.000000s 
:BS_NODE DEBUG: [11] ClientConnected Sender# [11:2173:37] Status# ERROR ClientId# [11:2173:37] ServerId# [0:0:0] PipeClient# [11:2173:37] 2026-02-19T08:04:25.517887Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2174:37] Status# ERROR ClientId# [12:2174:37] ServerId# [0:0:0] PipeClient# [12:2174:37] 2026-02-19T08:04:25.517933Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2175:37] Status# ERROR ClientId# [13:2175:37] ServerId# [0:0:0] PipeClient# [13:2175:37] 2026-02-19T08:04:25.517995Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2176:37] Status# ERROR ClientId# [14:2176:37] ServerId# [0:0:0] PipeClient# [14:2176:37] 2026-02-19T08:04:25.518040Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2177:37] Status# ERROR ClientId# [15:2177:37] ServerId# [0:0:0] PipeClient# [15:2177:37] 2026-02-19T08:04:25.518078Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2178:37] Status# ERROR ClientId# [16:2178:37] ServerId# [0:0:0] PipeClient# [16:2178:37] 2026-02-19T08:04:25.518116Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2179:37] Status# ERROR ClientId# [17:2179:37] ServerId# [0:0:0] PipeClient# [17:2179:37] 2026-02-19T08:04:25.518158Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2180:37] Status# ERROR ClientId# [18:2180:37] ServerId# [0:0:0] PipeClient# [18:2180:37] 2026-02-19T08:04:25.518215Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2181:37] Status# ERROR ClientId# [19:2181:37] ServerId# [0:0:0] PipeClient# [19:2181:37] 2026-02-19T08:04:25.518258Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2182:37] Status# ERROR ClientId# [20:2182:37] ServerId# [0:0:0] PipeClient# [20:2182:37] 2026-02-19T08:04:25.518297Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2183:37] Status# ERROR ClientId# [21:2183:37] ServerId# [0:0:0] PipeClient# [21:2183:37] 2026-02-19T08:04:25.518348Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2184:37] Status# ERROR ClientId# [22:2184:37] ServerId# [0:0:0] PipeClient# [22:2184:37] 2026-02-19T08:04:25.518389Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2185:37] Status# ERROR ClientId# [23:2185:37] ServerId# [0:0:0] PipeClient# [23:2185:37] 2026-02-19T08:04:25.518427Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2186:37] Status# ERROR ClientId# [24:2186:37] ServerId# [0:0:0] PipeClient# [24:2186:37] 2026-02-19T08:04:25.518464Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2187:37] Status# ERROR ClientId# [25:2187:37] ServerId# [0:0:0] PipeClient# [25:2187:37] 2026-02-19T08:04:25.518504Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2188:37] Status# ERROR ClientId# [26:2188:37] ServerId# [0:0:0] PipeClient# [26:2188:37] 2026-02-19T08:04:25.518545Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2189:37] Status# ERROR ClientId# [27:2189:37] ServerId# [0:0:0] PipeClient# [27:2189:37] 2026-02-19T08:04:25.518583Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2190:37] Status# ERROR ClientId# [28:2190:37] ServerId# [0:0:0] PipeClient# [28:2190:37] 2026-02-19T08:04:25.518623Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2191:37] Status# ERROR ClientId# [29:2191:37] ServerId# [0:0:0] PipeClient# [29:2191:37] 2026-02-19T08:04:25.518687Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] 
ClientConnected Sender# [30:2192:37] Status# ERROR ClientId# [30:2192:37] ServerId# [0:0:0] PipeClient# [30:2192:37] 2026-02-19T08:04:25.518728Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2193:37] Status# ERROR ClientId# [31:2193:37] ServerId# [0:0:0] PipeClient# [31:2193:37] 2026-02-19T08:04:25.518766Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2194:37] Status# ERROR ClientId# [32:2194:37] ServerId# [0:0:0] PipeClient# [32:2194:37] 2026-02-19T08:04:25.684963Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2205} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.134769s 2026-02-19T08:04:25.685100Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1012} StateWork event processing took too much time Type# 2146435078 Duration# 0.134930s 2026-02-19T08:04:25.704771Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2264:79] expected 1 current 0 2026-02-19T08:04:25.704933Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2265:38] expected 1 current 0 2026-02-19T08:04:25.704967Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2266:38] expected 1 current 0 2026-02-19T08:04:25.705001Z 4 00h00m00.002560s :BS_NODE DEBUG: [4] CheckState from [4:2267:38] expected 1 current 0 2026-02-19T08:04:25.705033Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2268:38] expected 1 current 0 2026-02-19T08:04:25.705065Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState from [6:2269:38] expected 1 current 0 2026-02-19T08:04:25.705101Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] CheckState from [ ... # 2147483688 VDiskId# [80000028:3:0:4:0] DiskIsOk# true 2026-02-19T08:04:34.712776Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483688 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.712806Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483688 VDiskId# [80000028:3:0:6:0] DiskIsOk# true 2026-02-19T08:04:34.712836Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483688 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.712868Z 1 05h15m00.120992s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483688 VDiskId# [80000028:3:0:7:0] DiskIsOk# true 2026-02-19T08:04:34.717029Z 1 05h15m00.121504s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:34.717114Z 1 05h15m00.121504s :BS_NODE DEBUG: [1] VDiskId# [80000028:3:0:0:0] -> [80000028:4:0:0:0] 2026-02-19T08:04:34.717668Z 1 05h15m00.121504s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483688 Items# [80000028:3:0:5:0]: 6:1000:1005 -> 24:1001:1016 ConfigTxSeqNo# 499 2026-02-19T08:04:34.725836Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483688 Success# true 2026-02-19T08:04:34.726112Z 2 05h15m00.121504s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:34.726190Z 2 05h15m00.121504s :BS_NODE DEBUG: [2] VDiskId# [80000028:3:0:1:0] -> [80000028:4:0:1:0] 2026-02-19T08:04:34.726290Z 3 05h15m00.121504s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-02-19T08:04:34.726337Z 3 05h15m00.121504s :BS_NODE DEBUG: [3] VDiskId# [80000028:3:0:2:0] -> [80000028:4:0:2:0] 2026-02-19T08:04:34.726424Z 21 05h15m00.121504s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-02-19T08:04:34.726518Z 21 05h15m00.121504s :BS_NODE 
DEBUG: [21] VDiskId# [80000028:3:0:6:0] -> [80000028:4:0:6:0] 2026-02-19T08:04:34.726607Z 4 05h15m00.121504s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2026-02-19T08:04:34.726653Z 4 05h15m00.121504s :BS_NODE DEBUG: [4] VDiskId# [80000028:3:0:3:0] -> [80000028:4:0:3:0] 2026-02-19T08:04:34.726718Z 6 05h15m00.121504s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.726793Z 24 05h15m00.121504s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2026-02-19T08:04:34.726838Z 24 05h15m00.121504s :BS_NODE DEBUG: [24] VDiskId# [80000028:4:0:5:0] PDiskId# 1001 VSlotId# 1016 created 2026-02-19T08:04:34.726926Z 24 05h15m00.121504s :BS_NODE DEBUG: [24] VDiskId# [80000028:4:0:5:0] status changed to INIT_PENDING 2026-02-19T08:04:34.727027Z 8 05h15m00.121504s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-02-19T08:04:34.727074Z 8 05h15m00.121504s :BS_NODE DEBUG: [8] VDiskId# [80000028:3:0:7:0] -> [80000028:4:0:7:0] 2026-02-19T08:04:34.727152Z 13 05h15m00.121504s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-02-19T08:04:34.727200Z 13 05h15m00.121504s :BS_NODE DEBUG: [13] VDiskId# [80000028:3:0:4:0] -> [80000028:4:0:4:0] 2026-02-19T08:04:34.727525Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483672 2026-02-19T08:04:34.728053Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.728106Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:3:0:0:0] DiskIsOk# true 2026-02-19T08:04:34.728378Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.728413Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:3:0:1:0] DiskIsOk# true 2026-02-19T08:04:34.728448Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.728495Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:3:0:2:0] DiskIsOk# true 2026-02-19T08:04:34.728527Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.728559Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:3:0:3:0] DiskIsOk# true 2026-02-19T08:04:34.728592Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.728624Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:3:0:4:0] DiskIsOk# true 2026-02-19T08:04:34.728661Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.728691Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:3:0:6:0] DiskIsOk# true 2026-02-19T08:04:34.728725Z 1 05h15m00.121504s 
:BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:34.728756Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:3:0:7:0] DiskIsOk# true 2026-02-19T08:04:34.732876Z 1 05h15m00.122016s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2026-02-19T08:04:34.732961Z 1 05h15m00.122016s :BS_NODE DEBUG: [1] VDiskId# [80000018:3:0:0:0] -> [80000018:4:0:0:0] 2026-02-19T08:04:34.733486Z 1 05h15m00.122016s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483672 Items# [80000018:3:0:5:0]: 6:1000:1003 -> 24:1001:1017 ConfigTxSeqNo# 500 2026-02-19T08:04:34.733529Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483672 Success# true 2026-02-19T08:04:34.733702Z 2 05h15m00.122016s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:34.741897Z 2 05h15m00.122016s :BS_NODE DEBUG: [2] VDiskId# [80000018:3:0:1:0] -> [80000018:4:0:1:0] 2026-02-19T08:04:34.742074Z 3 05h15m00.122016s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2026-02-19T08:04:34.742126Z 3 05h15m00.122016s :BS_NODE DEBUG: [3] VDiskId# [80000018:3:0:2:0] -> [80000018:4:0:2:0] 2026-02-19T08:04:34.742217Z 21 05h15m00.122016s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2026-02-19T08:04:34.742270Z 21 05h15m00.122016s :BS_NODE DEBUG: [21] VDiskId# [80000018:3:0:6:0] -> [80000018:4:0:6:0] 2026-02-19T08:04:34.742359Z 4 05h15m00.122016s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2026-02-19T08:04:34.742408Z 4 05h15m00.122016s :BS_NODE DEBUG: [4] VDiskId# [80000018:3:0:3:0] -> [80000018:4:0:3:0] 2026-02-19T08:04:34.742473Z 6 05h15m00.122016s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.742558Z 24 05h15m00.122016s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2026-02-19T08:04:34.742608Z 24 05h15m00.122016s :BS_NODE DEBUG: [24] VDiskId# [80000018:4:0:5:0] PDiskId# 1001 VSlotId# 1017 created 2026-02-19T08:04:34.742708Z 24 05h15m00.122016s :BS_NODE DEBUG: [24] VDiskId# [80000018:4:0:5:0] status changed to INIT_PENDING 2026-02-19T08:04:34.742809Z 8 05h15m00.122016s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-02-19T08:04:34.742858Z 8 05h15m00.122016s :BS_NODE DEBUG: [8] VDiskId# [80000018:3:0:7:0] -> [80000018:4:0:7:0] 2026-02-19T08:04:34.742943Z 13 05h15m00.122016s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2026-02-19T08:04:34.742989Z 13 05h15m00.122016s :BS_NODE DEBUG: [13] VDiskId# [80000018:3:0:4:0] -> [80000018:4:0:4:0] 2026-02-19T08:04:34.744202Z 24 05h15m01.482432s :BS_NODE DEBUG: [24] VDiskId# [80000030:4:0:5:0] status changed to REPLICATING 2026-02-19T08:04:34.745091Z 24 05h15m01.602016s :BS_NODE DEBUG: [24] VDiskId# [80000018:4:0:5:0] status changed to REPLICATING 2026-02-19T08:04:34.745879Z 24 05h15m01.755920s :BS_NODE DEBUG: [24] VDiskId# [80000008:4:0:5:0] status changed to REPLICATING 2026-02-19T08:04:34.750051Z 24 05h15m01.943504s :BS_NODE DEBUG: [24] VDiskId# [80000028:4:0:5:0] status changed to REPLICATING 2026-02-19T08:04:34.750930Z 24 05h15m02.259968s :BS_NODE DEBUG: [24] VDiskId# [80000000:4:0:5:0] status changed to REPLICATING 2026-02-19T08:04:34.751780Z 24 05h15m03.080480s :BS_NODE DEBUG: [24] VDiskId# [80000025:5:0:7:0] status changed to REPLICATING 2026-02-19T08:04:34.752582Z 24 05h15m03.831992s :BS_NODE DEBUG: [24] VDiskId# [80000038:4:0:5:0] status changed to REPLICATING 2026-02-19T08:04:34.753389Z 24 05h15m04.960456s :BS_NODE DEBUG: [24] VDiskId# [80000010:4:0:5:0] 
status changed to REPLICATING 2026-02-19T08:04:34.755129Z 24 05h15m05.633944s :BS_NODE DEBUG: [24] VDiskId# [80000020:4:0:5:0] status changed to REPLICATING 2026-02-19T08:04:34.756284Z 24 05h15m13.326016s :BS_NODE DEBUG: [24] VDiskId# [80000018:4:0:5:0] status changed to READY 2026-02-19T08:04:34.757890Z 6 05h15m13.326528s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.757955Z 6 05h15m13.326528s :BS_NODE DEBUG: [6] VDiskId# [80000018:3:0:5:0] destroyed 2026-02-19T08:04:34.758756Z 24 05h15m19.564456s :BS_NODE DEBUG: [24] VDiskId# [80000010:4:0:5:0] status changed to READY 2026-02-19T08:04:34.760221Z 6 05h15m19.564968s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.760277Z 6 05h15m19.564968s :BS_NODE DEBUG: [6] VDiskId# [80000010:3:0:5:0] destroyed 2026-02-19T08:04:34.760671Z 24 05h15m21.581992s :BS_NODE DEBUG: [24] VDiskId# [80000038:4:0:5:0] status changed to READY 2026-02-19T08:04:34.762057Z 6 05h15m21.582504s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.762117Z 6 05h15m21.582504s :BS_NODE DEBUG: [6] VDiskId# [80000038:3:0:5:0] destroyed 2026-02-19T08:04:34.762274Z 24 05h15m22.315968s :BS_NODE DEBUG: [24] VDiskId# [80000000:4:0:5:0] status changed to READY 2026-02-19T08:04:34.763580Z 6 05h15m22.316480s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.763634Z 6 05h15m22.316480s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:0:5:0] destroyed 2026-02-19T08:04:34.763771Z 24 05h15m22.483920s :BS_NODE DEBUG: [24] VDiskId# [80000008:4:0:5:0] status changed to READY 2026-02-19T08:04:34.765063Z 6 05h15m22.484432s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.765115Z 6 05h15m22.484432s :BS_NODE DEBUG: [6] VDiskId# [80000008:3:0:5:0] destroyed 2026-02-19T08:04:34.766408Z 24 05h15m30.394432s :BS_NODE DEBUG: [24] VDiskId# [80000030:4:0:5:0] status changed to READY 2026-02-19T08:04:34.767692Z 6 05h15m30.394944s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.767743Z 6 05h15m30.394944s :BS_NODE DEBUG: [6] VDiskId# [80000030:3:0:5:0] destroyed 2026-02-19T08:04:34.767876Z 24 05h15m30.552480s :BS_NODE DEBUG: [24] VDiskId# [80000025:5:0:7:0] status changed to READY 2026-02-19T08:04:34.769135Z 6 05h15m30.552992s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.769188Z 6 05h15m30.552992s :BS_NODE DEBUG: [6] VDiskId# [80000025:4:0:7:0] destroyed 2026-02-19T08:04:34.769364Z 24 05h15m33.001504s :BS_NODE DEBUG: [24] VDiskId# [80000028:4:0:5:0] status changed to READY 2026-02-19T08:04:34.770624Z 6 05h15m33.002016s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.770677Z 6 05h15m33.002016s :BS_NODE DEBUG: [6] VDiskId# [80000028:3:0:5:0] destroyed 2026-02-19T08:04:34.772092Z 24 05h15m38.312944s :BS_NODE DEBUG: [24] VDiskId# [80000020:4:0:5:0] status changed to READY 2026-02-19T08:04:34.773397Z 6 05h15m38.313456s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2026-02-19T08:04:34.773448Z 6 05h15m38.313456s :BS_NODE DEBUG: [6] VDiskId# [80000020:3:0:5:0] destroyed |88.6%| [TA] $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |88.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blob_depot/ut/unittest >> GivenIdRange::Allocate [GOOD] |88.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TM] {RESULT} ydb/core/blob_depot/ut/unittest >> ColumnBuildTest::BaseCase >> ColumnBuildTest::AlterMainTable_Failed >> ColumnBuildTest::Applying_Failed >> ColumnBuildTest::Locking_Failed >> ColumnBuildTest::Initiating_Failed >> ColumnBuildTest::Filling_Failed >> ColumnBuildTest::DoNotRestoreDeletedRows >> ColumnBuildTest::Rejecting >> ColumnBuildTest::ValidDefaultValue >> ColumnBuildTest::Cancelling >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/dqrun |88.6%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun >> KqpUserConstraint::KqpReadNull-UploadNull |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> KqpUserConstraint::KqpReadNull+UploadNull |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut >> SequenceShardTests::Basics |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] [GOOD] |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/actors/ut_service/fast/core-kqp-federated_query-actors-ut_service-fast >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/actors/ut_service/fast/core-kqp-federated_query-actors-ut_service-fast |88.6%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/actors/ut_service/fast/core-kqp-federated_query-actors-ut_service-fast >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect >> ColumnBuildTest::AlterMainTable_Failed [GOOD] >> ColumnBuildTest::AlterMainTable_DisableFlag >> ColumnBuildTest::Locking_Failed [GOOD] >> ColumnBuildTest::Locking_DisableFlag >> SequenceProxy::Basics >> ColumnBuildTest::Filling_Failed [GOOD] >> ColumnBuildTest::Filling_DisableFlag >> ColumnBuildTest::Initiating_Failed [GOOD] >> ColumnBuildTest::Initiating_DisableFlag >> ColumnBuildTest::Applying_Failed [GOOD] >> ColumnBuildTest::Applying_DisableFlag >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> SequenceShardTests::NegativeIncrement >> BsControllerTest::SelfHealMirror3dc [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::Create >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> SequenceShardTests::NegativeIncrement [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndWait >> TListAllTopicsTests::PlainList >> DataShardDiskQuotas::DiskQuotaExceeded >> test.py::test[solomon-BadDownsamplingAggregation-] >> DescribeSchemaSecretsService::GetNewValue >> Mirror3of4::ReplicationHuge [GOOD] >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command 
err: 2026-02-19T08:04:39.080115Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-02-19T08:04:39.080252Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-02-19T08:04:39.091301Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-02-19T08:04:39.094864Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-02-19T08:04:39.094930Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-02-19T08:04:39.101439Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2026-02-19T08:04:39.101566Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2026-02-19T08:04:39.124350Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-02-19T08:04:39.124750Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2026-02-19T08:04:39.124798Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:33: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:39.124853Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-02-19T08:04:39.125155Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2026-02-19T08:04:39.125281Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2026-02-19T08:04:39.137234Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-02-19T08:04:39.137532Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-02-19T08:04:39.137621Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2026-02-19T08:04:39.149641Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.150010Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2026-02-19T08:04:39.150115Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] 
TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2026-02-19T08:04:39.162223Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.162571Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-02-19T08:04:39.162659Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2026-02-19T08:04:39.174680Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.175034Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2026-02-19T08:04:39.175123Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2026-02-19T08:04:39.186987Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.187422Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2026-02-19T08:04:39.187463Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2026-02-19T08:04:39.187516Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.187726Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2026-02-19T08:04:39.187817Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2026-02-19T08:04:39.199661Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.200043Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2026-02-19T08:04:39.200087Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:39.200142Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.200360Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:39.200431Z node 1 :SEQUENCESHARD NOTICE: tx_drop_sequence.cpp:43: [sequenceshard 72057594037927937] 
TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:39.212387Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2026-02-19T08:04:39.212746Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:39.212807Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:33: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:39.212865Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2026-02-19T08:04:39.222814Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-02-19T08:04:39.222920Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-02-19T08:04:39.223340Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-02-19T08:04:39.223785Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-02-19T08:04:39.223972Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-02-19T08:04:39.228061Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-02-19T08:04:39.228127Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:39.228195Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.228451Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-02-19T08:04:39.228546Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2026-02-19T08:04:39.271332Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.271815Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2026-02-19T08:04:39.271910Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2026-02-19T08:04:39.284170Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-02-19T08:04:39.284599Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-02-19T08:04:39.284701Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 
AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2026-02-19T08:04:39.297031Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:39.297533Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2026-02-19T08:04:39.297633Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2026-02-19T08:04:39.320220Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-02-19T08:04:39.320653Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-02-19T08:04:39.320737Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2026-02-19T08:04:39.332602Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard ... Id: 43] Cache# 0 2026-02-19T08:04:40.291515Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2026-02-19T08:04:40.303050Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:40.303395Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2026-02-19T08:04:40.303429Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:66: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2026-02-19T08:04:40.303472Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2026-02-19T08:04:40.303629Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-02-19T08:04:40.303691Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-02-19T08:04:40.315090Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-02-19T08:04:40.315389Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-02-19T08:04:40.315479Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-02-19T08:04:40.326965Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: 
[sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-02-19T08:04:40.327176Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-02-19T08:04:40.327231Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-02-19T08:04:40.338530Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-02-19T08:04:40.338763Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-02-19T08:04:40.338795Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:54: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2026-02-19T08:04:40.338835Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:40.339001Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2026-02-19T08:04:40.339059Z node 3 :SEQUENCESHARD NOTICE: tx_freeze_sequence.cpp:68: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2026-02-19T08:04:40.350471Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2026-02-19T08:04:40.350817Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2026-02-19T08:04:40.350914Z node 3 :SEQUENCESHARD NOTICE: tx_restore_sequence.cpp:98: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2026-02-19T08:04:40.362733Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2026-02-19T08:04:40.363150Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2026-02-19T08:04:40.363243Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2026-02-19T08:04:40.374925Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-02-19T08:04:40.375244Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2026-02-19T08:04:40.375279Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:48: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2026-02-19T08:04:40.375326Z node 3 :SEQUENCESHARD TRACE: 
tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2026-02-19T08:04:40.375495Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-02-19T08:04:40.375580Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 AllocationIncrement# 1 2026-02-19T08:04:40.387142Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:40.762425Z node 4 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-02-19T08:04:40.762522Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-02-19T08:04:40.774040Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-02-19T08:04:40.777584Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-02-19T08:04:40.777652Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-02-19T08:04:40.779472Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2026-02-19T08:04:40.779602Z node 4 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2026-02-19T08:04:40.802148Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-02-19T08:04:40.802433Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-02-19T08:04:40.802532Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2026-02-19T08:04:40.814424Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:40.814745Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-02-19T08:04:40.814854Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2026-02-19T08:04:40.826798Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:40.827152Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2026-02-19T08:04:40.827244Z node 4 :SEQUENCESHARD TRACE: 
tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2026-02-19T08:04:40.839361Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:40.839725Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2026-02-19T08:04:40.839776Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:40.839835Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:40.840113Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cycle: true 2026-02-19T08:04:40.840197Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2026-02-19T08:04:40.852165Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-02-19T08:04:40.852534Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-02-19T08:04:40.852631Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2026-02-19T08:04:40.864259Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-02-19T08:04:40.864521Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-02-19T08:04:40.864589Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2026-02-19T08:04:40.876526Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete |88.6%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2026-02-19T08:04:25.400517Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2026-02-19T08:04:25.400574Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2026-02-19T08:04:25.400650Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2026-02-19T08:04:25.400674Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2026-02-19T08:04:25.400716Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2026-02-19T08:04:25.400744Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2026-02-19T08:04:25.400808Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2026-02-19T08:04:25.400833Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] 
Connect 2026-02-19T08:04:25.400864Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2026-02-19T08:04:25.400885Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2026-02-19T08:04:25.400918Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2026-02-19T08:04:25.400938Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2026-02-19T08:04:25.400970Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2026-02-19T08:04:25.400992Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2026-02-19T08:04:25.401060Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2026-02-19T08:04:25.401095Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2026-02-19T08:04:25.401144Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2026-02-19T08:04:25.401160Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2026-02-19T08:04:25.401182Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2026-02-19T08:04:25.401196Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2026-02-19T08:04:25.401220Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2026-02-19T08:04:25.401234Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2026-02-19T08:04:25.401255Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2026-02-19T08:04:25.401270Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2026-02-19T08:04:25.401292Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2026-02-19T08:04:25.401306Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2026-02-19T08:04:25.401331Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2026-02-19T08:04:25.401344Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2026-02-19T08:04:25.401370Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2026-02-19T08:04:25.401384Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2026-02-19T08:04:25.401403Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2026-02-19T08:04:25.401425Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2026-02-19T08:04:25.401452Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2026-02-19T08:04:25.401465Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2026-02-19T08:04:25.401486Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2026-02-19T08:04:25.401507Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2026-02-19T08:04:25.401532Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2026-02-19T08:04:25.401549Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2026-02-19T08:04:25.401592Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2026-02-19T08:04:25.401610Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2026-02-19T08:04:25.401632Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2026-02-19T08:04:25.401645Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2026-02-19T08:04:25.401682Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2026-02-19T08:04:25.401702Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2026-02-19T08:04:25.401749Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2026-02-19T08:04:25.401764Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2026-02-19T08:04:25.401797Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2026-02-19T08:04:25.401819Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2026-02-19T08:04:25.401849Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2026-02-19T08:04:25.401866Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2026-02-19T08:04:25.401897Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2026-02-19T08:04:25.401919Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2026-02-19T08:04:25.401957Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2026-02-19T08:04:25.401986Z 27 00h00m00.000000s 
:BS_NODE DEBUG: [27] Connect 2026-02-19T08:04:25.402024Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2026-02-19T08:04:25.402044Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2026-02-19T08:04:25.402080Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2026-02-19T08:04:25.402116Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2026-02-19T08:04:25.402165Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2026-02-19T08:04:25.402185Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2026-02-19T08:04:25.402219Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2026-02-19T08:04:25.402248Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2026-02-19T08:04:25.402286Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2026-02-19T08:04:25.402329Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2026-02-19T08:04:25.402373Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2026-02-19T08:04:25.402395Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2026-02-19T08:04:25.402432Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2026-02-19T08:04:25.402452Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2026-02-19T08:04:25.402485Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2026-02-19T08:04:25.402502Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2026-02-19T08:04:25.402547Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2026-02-19T08:04:25.402572Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2026-02-19T08:04:25.422893Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2026-02-19T08:04:25.424022Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2026-02-19T08:04:25.424065Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2026-02-19T08:04:25.424126Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2026-02-19T08:04:25.424152Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2026-02-19T08:04:25.424181Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2026-02-19T08:04:25.424205Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2026-02-19T08:04:25.424229Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2026-02-19T08:04:25.424253Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2026-02-19T08:04:25.424282Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2026-02-19T08:04:25.424307Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2026-02-19T08:04:25.424331Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR 
ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2026-02-19T08:04:25.424355Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2026-02-19T08:04:25.424403Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2026-02-19T08:04:25.424440Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2026-02-19T08:04:25.424495Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2026-02-19T08:04:25.424519Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2026-02-19T08:04:25.424544Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2026-02-19T08:04:25.424578Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2026-02-19T08:04:25.424608Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2026-02-19T08:04:25.424638Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2026-02-19T08:04:25.424664Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2026-02-19T08:04:25.424697Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2026-02-19T08:04:25.424723Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2026-02-19T08:04:25.424748Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2026-02-19T08:04:25.424771Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2026-02-19T08:04:25.424805Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2026-02-19T08:04:25.424836Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2026-02-19T08:04:25.424861Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2026-02-19T08:04:25.424896Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2026-02-19T08:04:25.424930Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# 
[31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2026-02-19T08:04:25.424959Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2026-02-19T08:04:25.424985Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2026-02-19T08:04:25.425013Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2026-02-19T08:04:25.425037Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 19T08:04:39.904129Z 1 05h45m00.123040s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483688 Items# [80000028:1:0:0:0]: 2:1002:1002 -> 12:1001:1009 ConfigTxSeqNo# 537 2026-02-19T08:04:39.904170Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483688 Success# true 2026-02-19T08:04:39.904302Z 17 05h45m00.123040s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-02-19T08:04:39.904370Z 17 05h45m00.123040s :BS_NODE DEBUG: [17] VDiskId# [80000028:1:1:1:0] -> [80000028:2:1:1:0] 2026-02-19T08:04:39.904440Z 2 05h45m00.123040s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.904520Z 20 05h45m00.123040s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2026-02-19T08:04:39.904557Z 20 05h45m00.123040s :BS_NODE DEBUG: [20] VDiskId# [80000028:1:1:2:0] -> [80000028:2:1:2:0] 2026-02-19T08:04:39.904623Z 5 05h45m00.123040s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-02-19T08:04:39.904659Z 5 05h45m00.123040s :BS_NODE DEBUG: [5] VDiskId# [80000028:1:0:1:0] -> [80000028:2:0:1:0] 2026-02-19T08:04:39.904718Z 8 05h45m00.123040s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-02-19T08:04:39.904753Z 8 05h45m00.123040s :BS_NODE DEBUG: [8] VDiskId# [80000028:1:0:2:0] -> [80000028:2:0:2:0] 2026-02-19T08:04:39.904816Z 26 05h45m00.123040s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2026-02-19T08:04:39.904850Z 26 05h45m00.123040s :BS_NODE DEBUG: [26] VDiskId# [80000028:1:2:0:0] -> [80000028:2:2:0:0] 2026-02-19T08:04:39.904910Z 29 05h45m00.123040s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2026-02-19T08:04:39.904944Z 29 05h45m00.123040s :BS_NODE DEBUG: [29] VDiskId# [80000028:1:2:1:0] -> [80000028:2:2:1:0] 2026-02-19T08:04:39.905008Z 12 05h45m00.123040s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-02-19T08:04:39.905050Z 12 05h45m00.123040s :BS_NODE DEBUG: [12] VDiskId# [80000028:2:0:0:0] PDiskId# 1001 VSlotId# 1009 created 2026-02-19T08:04:39.905133Z 12 05h45m00.123040s :BS_NODE DEBUG: [12] VDiskId# [80000028:2:0:0:0] status changed to INIT_PENDING 2026-02-19T08:04:39.905220Z 14 05h45m00.123040s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2026-02-19T08:04:39.905267Z 14 05h45m00.123040s :BS_NODE DEBUG: [14] VDiskId# [80000028:1:1:0:0] -> [80000028:2:1:0:0] 2026-02-19T08:04:39.905337Z 32 05h45m00.123040s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2026-02-19T08:04:39.905373Z 32 05h45m00.123040s :BS_NODE DEBUG: [32] VDiskId# [80000028:1:2:2:0] -> [80000028:2:2:2:0] 2026-02-19T08:04:39.905687Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483672 2026-02-19T08:04:39.906446Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 
2026-02-19T08:04:39.906502Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:1:0] DiskIsOk# true 2026-02-19T08:04:39.906536Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:39.906558Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:2:0] DiskIsOk# true 2026-02-19T08:04:39.906581Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:39.906601Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:1:0:0] DiskIsOk# true 2026-02-19T08:04:39.906626Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:39.906648Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:1:1:0] DiskIsOk# true 2026-02-19T08:04:39.906669Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:39.906689Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:1:2:0] DiskIsOk# true 2026-02-19T08:04:39.906708Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:39.906724Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:2:0:0] DiskIsOk# true 2026-02-19T08:04:39.906743Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:39.906760Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:2:1:0] DiskIsOk# true 2026-02-19T08:04:39.906779Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2026-02-19T08:04:39.906797Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:2:2:0] DiskIsOk# true 2026-02-19T08:04:39.911309Z 1 05h45m00.123552s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483672 Items# [80000018:1:0:0:0]: 2:1002:1001 -> 12:1002:1009 ConfigTxSeqNo# 538 2026-02-19T08:04:39.911353Z 1 05h45m00.123552s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483672 Success# true 2026-02-19T08:04:39.911472Z 17 05h45m00.123552s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2026-02-19T08:04:39.911524Z 17 05h45m00.123552s :BS_NODE DEBUG: [17] VDiskId# [80000018:1:1:1:0] -> [80000018:2:1:1:0] 2026-02-19T08:04:39.911587Z 2 05h45m00.123552s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.911649Z 20 05h45m00.123552s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 
2026-02-19T08:04:39.911684Z 20 05h45m00.123552s :BS_NODE DEBUG: [20] VDiskId# [80000018:1:1:2:0] -> [80000018:2:1:2:0] 2026-02-19T08:04:39.911745Z 5 05h45m00.123552s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2026-02-19T08:04:39.911777Z 5 05h45m00.123552s :BS_NODE DEBUG: [5] VDiskId# [80000018:1:0:1:0] -> [80000018:2:0:1:0] 2026-02-19T08:04:39.911840Z 8 05h45m00.123552s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2026-02-19T08:04:39.911882Z 8 05h45m00.123552s :BS_NODE DEBUG: [8] VDiskId# [80000018:1:0:2:0] -> [80000018:2:0:2:0] 2026-02-19T08:04:39.911955Z 26 05h45m00.123552s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2026-02-19T08:04:39.911991Z 26 05h45m00.123552s :BS_NODE DEBUG: [26] VDiskId# [80000018:1:2:0:0] -> [80000018:2:2:0:0] 2026-02-19T08:04:39.912050Z 29 05h45m00.123552s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2026-02-19T08:04:39.912083Z 29 05h45m00.123552s :BS_NODE DEBUG: [29] VDiskId# [80000018:1:2:1:0] -> [80000018:2:2:1:0] 2026-02-19T08:04:39.912145Z 12 05h45m00.123552s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2026-02-19T08:04:39.912175Z 12 05h45m00.123552s :BS_NODE DEBUG: [12] VDiskId# [80000018:2:0:0:0] PDiskId# 1002 VSlotId# 1009 created 2026-02-19T08:04:39.912233Z 12 05h45m00.123552s :BS_NODE DEBUG: [12] VDiskId# [80000018:2:0:0:0] status changed to INIT_PENDING 2026-02-19T08:04:39.912303Z 14 05h45m00.123552s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2026-02-19T08:04:39.912337Z 14 05h45m00.123552s :BS_NODE DEBUG: [14] VDiskId# [80000018:1:1:0:0] -> [80000018:2:1:0:0] 2026-02-19T08:04:39.912398Z 32 05h45m00.123552s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2026-02-19T08:04:39.912432Z 32 05h45m00.123552s :BS_NODE DEBUG: [32] VDiskId# [80000018:1:2:2:0] -> [80000018:2:2:2:0] 2026-02-19T08:04:39.913254Z 12 05h45m01.295528s :BS_NODE DEBUG: [12] VDiskId# [80000038:2:0:0:0] status changed to REPLICATING 2026-02-19T08:04:39.913620Z 4 05h45m02.065968s :BS_NODE DEBUG: [4] VDiskId# [8000001a:4:0:2:0] status changed to REPLICATING 2026-02-19T08:04:39.913895Z 11 05h45m02.594016s :BS_NODE DEBUG: [11] VDiskId# [80000048:2:0:0:0] status changed to REPLICATING 2026-02-19T08:04:39.914190Z 12 05h45m03.110480s :BS_NODE DEBUG: [12] VDiskId# [80000078:2:0:0:0] status changed to REPLICATING 2026-02-19T08:04:39.914595Z 12 05h45m03.412992s :BS_NODE DEBUG: [12] VDiskId# [80000068:2:0:0:0] status changed to REPLICATING 2026-02-19T08:04:39.914991Z 11 05h45m04.547456s :BS_NODE DEBUG: [11] VDiskId# [80000008:2:0:0:0] status changed to REPLICATING 2026-02-19T08:04:39.916253Z 12 05h45m05.109552s :BS_NODE DEBUG: [12] VDiskId# [80000018:2:0:0:0] status changed to REPLICATING 2026-02-19T08:04:39.916774Z 12 05h45m05.689040s :BS_NODE DEBUG: [12] VDiskId# [80000028:2:0:0:0] status changed to REPLICATING 2026-02-19T08:04:39.917177Z 12 05h45m05.917504s :BS_NODE DEBUG: [12] VDiskId# [80000058:2:0:0:0] status changed to REPLICATING 2026-02-19T08:04:39.917638Z 12 05h45m08.697992s :BS_NODE DEBUG: [12] VDiskId# [80000068:2:0:0:0] status changed to READY 2026-02-19T08:04:39.918589Z 2 05h45m08.698504s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.918637Z 2 05h45m08.698504s :BS_NODE DEBUG: [2] VDiskId# [80000068:1:0:0:0] destroyed 2026-02-19T08:04:39.919527Z 4 05h45m19.777968s :BS_NODE DEBUG: [4] VDiskId# [8000001a:4:0:2:0] status changed to READY 2026-02-19T08:04:39.920158Z 2 05h45m19.778480s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.920199Z 2 05h45m19.778480s :BS_NODE DEBUG: [2] VDiskId# [8000001a:3:0:2:0] destroyed 2026-02-19T08:04:39.920525Z 12 05h45m22.297480s :BS_NODE DEBUG: [12] 
VDiskId# [80000078:2:0:0:0] status changed to READY 2026-02-19T08:04:39.921296Z 2 05h45m22.297992s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.921350Z 2 05h45m22.297992s :BS_NODE DEBUG: [2] VDiskId# [80000078:1:0:0:0] destroyed 2026-02-19T08:04:39.921752Z 12 05h45m23.801528s :BS_NODE DEBUG: [12] VDiskId# [80000038:2:0:0:0] status changed to READY 2026-02-19T08:04:39.922534Z 2 05h45m23.802040s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.922570Z 2 05h45m23.802040s :BS_NODE DEBUG: [2] VDiskId# [80000038:1:0:0:0] destroyed 2026-02-19T08:04:39.922956Z 11 05h45m27.464016s :BS_NODE DEBUG: [11] VDiskId# [80000048:2:0:0:0] status changed to READY 2026-02-19T08:04:39.923596Z 2 05h45m27.464528s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.923637Z 2 05h45m27.464528s :BS_NODE DEBUG: [2] VDiskId# [80000048:1:0:0:0] destroyed 2026-02-19T08:04:39.924208Z 11 05h45m32.304456s :BS_NODE DEBUG: [11] VDiskId# [80000008:2:0:0:0] status changed to READY 2026-02-19T08:04:39.924845Z 2 05h45m32.304968s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.924880Z 2 05h45m32.304968s :BS_NODE DEBUG: [2] VDiskId# [80000008:1:0:0:0] destroyed 2026-02-19T08:04:39.924985Z 12 05h45m33.840504s :BS_NODE DEBUG: [12] VDiskId# [80000058:2:0:0:0] status changed to READY 2026-02-19T08:04:39.925614Z 2 05h45m33.841016s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.925649Z 2 05h45m33.841016s :BS_NODE DEBUG: [2] VDiskId# [80000058:1:0:0:0] destroyed 2026-02-19T08:04:39.926361Z 12 05h45m38.035552s :BS_NODE DEBUG: [12] VDiskId# [80000018:2:0:0:0] status changed to READY 2026-02-19T08:04:39.927245Z 2 05h45m38.036064s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.927288Z 2 05h45m38.036064s :BS_NODE DEBUG: [2] VDiskId# [80000018:1:0:0:0] destroyed 2026-02-19T08:04:39.927627Z 12 05h45m39.508040s :BS_NODE DEBUG: [12] VDiskId# [80000028:2:0:0:0] status changed to READY 2026-02-19T08:04:39.928346Z 2 05h45m39.508552s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2026-02-19T08:04:39.928380Z 2 05h45m39.508552s :BS_NODE DEBUG: [2] VDiskId# [80000028:1:0:0:0] destroyed >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate |88.6%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> ColumnBuildTest::Locking_DisableFlag [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> 
TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> ColumnBuildTest::Initiating_DisableFlag [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.863578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.863660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.863718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.863771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.863808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.863843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.863871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.863901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:37.863969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.864030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.864831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.865099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:37.952870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:37.952938Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:37.964219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:37.964848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:37.965001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:37.975669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:37.976305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:37.977003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:37.977318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:37.980085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:37.980258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:37.981369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:37.981428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:37.981565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:37.981609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:37.981749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:37.981927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.137362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.140719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.140848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.140939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.141804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
57 ready parts: 1/1 2026-02-19T08:04:41.677744Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-02-19T08:04:41.677778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-02-19T08:04:41.677818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-02-19T08:04:41.677884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1709:3572] message: TxId: 281474976730757 2026-02-19T08:04:41.677933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-02-19T08:04:41.677967Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-02-19T08:04:41.677998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730757:0 2026-02-19T08:04:41.678055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-02-19T08:04:41.680290Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-02-19T08:04:41.680348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730757 2026-02-19T08:04:41.680405Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2026-02-19T08:04:41.680516Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-02-19T08:04:41.681790Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-02-19T08:04:41.681898Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:41.681950Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-02-19T08:04:41.683166Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-02-19T08:04:41.683255Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:41.683292Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-02-19T08:04:41.683379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:04:41.683407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1767:3618] TestWaitNotification: OK eventTxId 105 
2026-02-19T08:04:41.683924Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-02-19T08:04:41.684152Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-02-19T08:04:41.684865Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:41.685078Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 234us result status StatusSuccess 2026-02-19T08:04:41.685519Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> ColumnBuildTest::Filling_DisableFlag [GOOD] >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> ColumnBuildTest::ValidDefaultValue [GOOD] >> ColumnBuildTest::Unlocking_Failed >> ColumnBuildTest::BaseCase [GOOD] >> ColumnBuildTest::Cancellation_Applying_DisableFlag >> ColumnBuildTest::Applying_DisableFlag [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_mirror3of4/unittest >> Mirror3of4::ReplicationHuge [GOOD] Test command err: 2026-02-19T08:04:03.753335Z 1 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:0:0]: (0) SKELETON START Marker# BSVS37 2026-02-19T08:04:03.753668Z 2 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:1:0]: (0) SKELETON START Marker# BSVS37 2026-02-19T08:04:03.753836Z 3 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:2:0]: (0) SKELETON START Marker# BSVS37 2026-02-19T08:04:03.753952Z 4 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:3:0]: (0) SKELETON START Marker# BSVS37 2026-02-19T08:04:03.754055Z 5 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:4:0]: (0) SKELETON START Marker# BSVS37 2026-02-19T08:04:03.754148Z 6 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:5:0]: (0) SKELETON START Marker# BSVS37 2026-02-19T08:04:03.754267Z 7 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:6:0]: (0) SKELETON START Marker# BSVS37 2026-02-19T08:04:03.754370Z 8 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:7:0]: (0) SKELETON START Marker# BSVS37 2026-02-19T08:04:03.754665Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery START 2026-02-19T08:04:03.754739Z 1 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Sending TEvYardInit: pdiskGuid# 9399330149463396904 skeletonid# [1:139:13] selfid# [1:155:22] 
delay 0.000000 sec 2026-02-19T08:04:03.754774Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:1:0]: (0) LocalRecovery START 2026-02-19T08:04:03.754798Z 2 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) Sending TEvYardInit: pdiskGuid# 14879273522359335334 skeletonid# [2:140:11] selfid# [2:156:12] delay 0.000000 sec 2026-02-19T08:04:03.754819Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:2:0]: (0) LocalRecovery START 2026-02-19T08:04:03.754838Z 3 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) Sending TEvYardInit: pdiskGuid# 12615487623668151681 skeletonid# [3:141:11] selfid# [3:157:12] delay 0.000000 sec 2026-02-19T08:04:03.754858Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:3:0]: (0) LocalRecovery START 2026-02-19T08:04:03.754878Z 4 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) Sending TEvYardInit: pdiskGuid# 7439767872182815966 skeletonid# [4:142:11] selfid# [4:158:12] delay 0.000000 sec 2026-02-19T08:04:03.754895Z 5 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:4:0]: (0) LocalRecovery START 2026-02-19T08:04:03.754914Z 5 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) Sending TEvYardInit: pdiskGuid# 9456503838533414400 skeletonid# [5:143:11] selfid# [5:159:12] delay 0.000000 sec 2026-02-19T08:04:03.754932Z 6 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:5:0]: (0) LocalRecovery START 2026-02-19T08:04:03.754950Z 6 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) Sending TEvYardInit: pdiskGuid# 13635172666244371911 skeletonid# [6:144:11] selfid# [6:160:12] delay 0.000000 sec 2026-02-19T08:04:03.754992Z 7 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:6:0]: (0) LocalRecovery START 2026-02-19T08:04:03.755025Z 7 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:6:0]: (0) Sending TEvYardInit: pdiskGuid# 18271254860643360576 skeletonid# [7:145:11] selfid# [7:161:12] delay 0.000000 sec 2026-02-19T08:04:03.755047Z 8 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:7:0]: (0) LocalRecovery START 2026-02-19T08:04:03.755066Z 8 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) Sending TEvYardInit: pdiskGuid# 12589610975186272785 skeletonid# [8:146:11] selfid# [8:162:12] delay 0.000000 sec 2026-02-19T08:04:03.755405Z 1 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:465} PDiskMock[1:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:0:0] PDiskGuid# 9399330149463396904 CutLogID# [1:139:13] WhiteboardProxyId# [1:122:10] SlotId# 0 GroupSizeInUnits# 0 GetDiskFd# 0} 2026-02-19T08:04:03.756255Z 1 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:503} PDiskMock[1:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {} DiskFd# -1} Created# true 2026-02-19T08:04:03.756375Z 2 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:465} PDiskMock[2:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:1:0] PDiskGuid# 
14879273522359335334 CutLogID# [2:140:11] WhiteboardProxyId# [2:124:10] SlotId# 0 GroupSizeInUnits# 0 GetDiskFd# 0} 2026-02-19T08:04:03.756455Z 2 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:503} PDiskMock[2:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {} DiskFd# -1} Created# true 2026-02-19T08:04:03.756507Z 3 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:465} PDiskMock[3:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:2:0] PDiskGuid# 12615487623668151681 CutLogID# [3:141:11] WhiteboardProxyId# [3:126:10] SlotId# 0 GroupSizeInUnits# 0 GetDiskFd# 0} 2026-02-19T08:04:03.756593Z 3 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:503} PDiskMock[3:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {} DiskFd# -1} Created# true 2026-02-19T08:04:03.756663Z 4 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:465} PDiskMock[4:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:3:0] PDiskGuid# 7439767872182815966 CutLogID# [4:142:11] WhiteboardProxyId# [4:128:10] SlotId# 0 GroupSizeInUnits# 0 GetDiskFd# 0} 2026-02-19T08:04:03.756711Z 4 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:503} PDiskMock[4:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {} DiskFd# -1} Created# true 2026-02-19T08:04:03.756787Z 5 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:465} PDiskMock[5:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:4:0] PDiskGuid# 9456503838533414400 CutLogID# [5:143:11] WhiteboardProxyId# [5:130:10] SlotId# 0 GroupSizeInUnits# 0 GetDiskFd# 0} 2026-02-19T08:04:03.756857Z 5 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:503} PDiskMock[5:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {} DiskFd# -1} Created# true 2026-02-19T08:04:03.756905Z 6 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:465} PDiskMock[6:1] received TEvYardInit 
Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:5:0] PDiskGuid# 13635172666244371911 CutLogID# [6:144:11] WhiteboardProxyId# [6:132:10] SlotId# 0 GroupSizeInUnits# 0 GetDiskFd# 0} 2026-02-19T08:04:03.756951Z 6 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:503} PDiskMock[6:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {} DiskFd# -1} Created# true 2026-02-19T08:04:03.756999Z 7 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:465} PDiskMock[7:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:6:0] PDiskGuid# 18271254860643360576 CutLogID# [7:145:11] WhiteboardProxyId# [7:134:10] SlotId# 0 GroupSizeInUnits# 0 GetDiskFd# 0} 2026-02-19T08:04:03.757060Z 7 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:503} PDiskMock[7:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {} DiskFd# -1} Created# true 2026-02-19T08:04:03.757107Z 8 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:465} PDiskMock[8:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:7:0] PDiskGuid# 12589610975186272785 CutLogID# [8:146:11] WhiteboardProxyId# [8:136:10] SlotId# 0 GroupSizeInUnits# 0 GetDiskFd# 0} 2026-02-19T08:04:03.757163Z 8 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:503} PDiskMock[8:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {} DiskFd# -1} Created# true 2026-02-19T08:04:03.758831Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:0:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2026-02-19T08:04:03.759938Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:1:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2026-02-19T08:04:03.760944Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:2:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2026-02-19T08:04:03.761953Z 4 00h00m00.000000s 
:BS_LOCALR ... g Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 585 Lsn# 25 LsnSegmentStart# 25 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2026-02-19T08:04:41.569418Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:674} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 21 Cookie# 9}{Lsn# 22 Cookie# 10}{Lsn# 23 Cookie# 11}{Lsn# 24 Cookie# 12}} Recipient# [8:146:11] 2026-02-19T08:04:41.569457Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:674} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 25 Cookie# 0}} Recipient# [8:363:30] 2026-02-19T08:04:41.572009Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:612} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 583 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2026-02-19T08:04:41.572060Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:674} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 26 Cookie# 0}} Recipient# [7:353:30] 2026-02-19T08:04:41.572131Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:612} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 585 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2026-02-19T08:04:41.572162Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:674} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 26 Cookie# 0}} Recipient# [8:363:30] 2026-02-19T08:04:41.572380Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2026-02-19T08:04:41.572627Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:612} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 583 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2026-02-19T08:04:41.572662Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:674} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 27 Cookie# 0}} Recipient# [7:353:30] 2026-02-19T08:04:41.572700Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) GLUEREAD(0x7c7ee7fbe0c0): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137021936851520} 2026-02-19T08:04:41.572744Z 8 00h00m00.000000s 
:BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:612} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 585 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2026-02-19T08:04:41.572773Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:674} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 27 Cookie# 0}} Recipient# [8:363:30] 2026-02-19T08:04:41.572826Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:764} PDiskMock[2:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137021936851520} VDiskId# [0:4294967295:0:1:0] 2026-02-19T08:04:41.573525Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:817} PDiskMock[2:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 137021936851520 StatusFlags# None} 2026-02-19T08:04:41.573649Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) GLUEREAD FINISHED(0x7c7ee7fbe0c0): actualReadN# 1 origReadN# 1 2026-02-19T08:04:41.573901Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1369701526376808448} BlockedGeneration# 0} 2026-02-19T08:04:41.576813Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2026-02-19T08:04:41.577518Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) GLUEREAD(0x7c7ee7fbee40): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137021937041984} 2026-02-19T08:04:41.577858Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:764} PDiskMock[3:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137021937041984} VDiskId# [0:4294967295:0:2:0] 2026-02-19T08:04:41.578783Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:817} PDiskMock[3:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 137021937041984 StatusFlags# None} 2026-02-19T08:04:41.578918Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) GLUEREAD FINISHED(0x7c7ee7fbee40): actualReadN# 1 origReadN# 1 2026-02-19T08:04:41.579018Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:1] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 2522623030983655424} BlockedGeneration# 0} 2026-02-19T08:04:41.581369Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { 
SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2026-02-19T08:04:41.581600Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2026-02-19T08:04:41.582456Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2026-02-19T08:04:41.582632Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2026-02-19T08:04:41.583407Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2026-02-19T08:04:41.583612Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) GLUEREAD(0x7c7ee7ebb1c0): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137021937098816} 2026-02-19T08:04:41.583691Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:764} PDiskMock[6:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137021937098816} VDiskId# [0:4294967295:0:5:0] 2026-02-19T08:04:41.584669Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:817} PDiskMock[6:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 137021937098816 StatusFlags# None} 2026-02-19T08:04:41.584742Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) GLUEREAD FINISHED(0x7c7ee7ebb1c0): actualReadN# 1 origReadN# 1 2026-02-19T08:04:41.584856Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1946162278680231936} {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 1946162278680231936} BlockedGeneration# 0} 2026-02-19T08:04:41.586588Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:6:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 
ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2026-02-19T08:04:41.586714Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:6:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} 2026-02-19T08:04:41.587211Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2026-02-19T08:04:41.587324Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Locking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.929148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.929241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.929302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.929343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.929374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.929425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.929454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.929479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:37.929570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.929665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.930560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.930869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:38.008280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:38.008333Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:38.018826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:38.019499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:38.019671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:38.030698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:38.031443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:38.032134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:38.032447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:38.035695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.035868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:38.037072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:38.037137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.037279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:38.037335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:38.037454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:38.037644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.161001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.161630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.161694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.161762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.161842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.161877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.161936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.161986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.162043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.162080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.162112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.162145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.162182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.162222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-02-19T08:04:41.912753Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-02-19T08:04:41.912789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-02-19T08:04:41.912825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-02-19T08:04:41.912879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1679:3544] message: TxId: 281474976730758 2026-02-19T08:04:41.912923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-02-19T08:04:41.912953Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-02-19T08:04:41.912982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730758:0 2026-02-19T08:04:41.913058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-02-19T08:04:41.915596Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-02-19T08:04:41.915653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730758 2026-02-19T08:04:41.915750Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2026-02-19T08:04:41.915876Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-02-19T08:04:41.918200Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 
2026-02-19T08:04:41.918335Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:41.918383Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-02-19T08:04:41.919811Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-02-19T08:04:41.919935Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:41.919983Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-02-19T08:04:41.920103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:04:41.920150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter 
[2:1740:3592] TestWaitNotification: OK eventTxId 105 2026-02-19T08:04:41.920883Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-02-19T08:04:41.921261Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-02-19T08:04:41.922225Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:41.922414Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 218us result status StatusSuccess 2026-02-19T08:04:41.922982Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 
0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> ColumnBuildTest::Cancelling [GOOD] >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> ColumnBuildTest::Rejecting [GOOD] >> ColumnBuildTest::Unlocking_DisableFlag >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false |88.6%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Initiating_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.949264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.949369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.949457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.949508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.949546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.949592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.949622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.949666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:37.949770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.949855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.950764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.951072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:38.047986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:38.048063Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:38.063794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 
2026-02-19T08:04:38.064467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:38.064663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:38.076919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:38.077657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:38.078429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:38.078750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:38.081866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.082031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:38.083451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:38.083534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.083671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:38.083718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:38.083818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:38.083994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.240400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.241426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.241579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.241685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.241848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.241948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.242054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.242133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.242215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.242266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.242306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.242352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.242397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.242459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
1474976730758 ready parts: 1/1 2026-02-19T08:04:42.531922Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-02-19T08:04:42.531946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-02-19T08:04:42.531972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-02-19T08:04:42.532018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1763:3627] message: TxId: 281474976730758 2026-02-19T08:04:42.532053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-02-19T08:04:42.532077Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-02-19T08:04:42.532101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730758:0 2026-02-19T08:04:42.532144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-02-19T08:04:42.534320Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-02-19T08:04:42.534365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730758 2026-02-19T08:04:42.534428Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2026-02-19T08:04:42.534528Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-02-19T08:04:42.535685Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-02-19T08:04:42.535791Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:42.535849Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-02-19T08:04:42.537009Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-02-19T08:04:42.537099Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:42.537127Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-02-19T08:04:42.537219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:04:42.537252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter 
[2:1824:3675] TestWaitNotification: OK eventTxId 105 2026-02-19T08:04:42.537701Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-02-19T08:04:42.537952Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-02-19T08:04:42.538691Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:42.538863Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 174us result status StatusSuccess 2026-02-19T08:04:42.539184Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> SequenceProxy::DropRecreate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.914448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.914531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.914614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.914651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.914683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.914734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.914764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.914791Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:37.914858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.914936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.915705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.915995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:38.005626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:38.005693Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:38.018968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:38.019593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:38.019748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:38.030680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:38.031364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:38.032067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:38.032353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:38.035355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.035501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:38.036634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:38.036714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.036824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:38.036867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:38.036963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:38.037156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.187231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.188972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.189039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.189098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.189168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.189256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
BUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-02-19T08:04:42.798507Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-02-19T08:04:42.798542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-02-19T08:04:42.798568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-02-19T08:04:42.798619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1836:3699] message: TxId: 281474976730757 2026-02-19T08:04:42.798662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-02-19T08:04:42.798687Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-02-19T08:04:42.798708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730757:0 2026-02-19T08:04:42.798762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-02-19T08:04:42.800969Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-02-19T08:04:42.801054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730757 2026-02-19T08:04:42.801120Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2026-02-19T08:04:42.801238Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-02-19T08:04:42.802938Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-02-19T08:04:42.803053Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:42.803102Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:04:42.804516Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-02-19T08:04:42.804616Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:42.804645Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-02-19T08:04:42.804747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:04:42.804796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1896:3746] TestWaitNotification: OK eventTxId 105 2026-02-19T08:04:42.805351Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-02-19T08:04:42.805654Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 
105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2026-02-19T08:04:42.806415Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:42.806591Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 199us result status StatusSuccess 2026-02-19T08:04:42.806926Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Filling_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.858918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.858978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.859031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.859073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.859097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.859124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.859148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.859167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:37.859216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.859276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.859835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.860021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:37.916881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:37.916928Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:37.925829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:37.926435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:37.926553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:37.934890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:37.935466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:37.935948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:37.936173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:37.938425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:37.938537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:37.939302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:37.939359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:37.939459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:37.939490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:37.939562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:37.939695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.042781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: 
EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.043982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.044017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.044056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.044108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
42.649547Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72075186233409549] TDone opId# 281474976730759:0 ProgressState 2026-02-19T08:04:42.649621Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2026-02-19T08:04:42.649646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-02-19T08:04:42.649676Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2026-02-19T08:04:42.649699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-02-19T08:04:42.649756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730759, ready parts: 1/1, is published: true 2026-02-19T08:04:42.649801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1820:3684] message: TxId: 281474976730759 2026-02-19T08:04:42.649843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-02-19T08:04:42.649869Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730759:0 2026-02-19T08:04:42.649891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730759:0 2026-02-19T08:04:42.649943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-02-19T08:04:42.652557Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730759 2026-02-19T08:04:42.652609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730759 2026-02-19T08:04:42.652665Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730759 2026-02-19T08:04:42.652797Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 
CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730759 2026-02-19T08:04:42.654389Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-02-19T08:04:42.654497Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:42.654534Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-02-19T08:04:42.655729Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-02-19T08:04:42.655814Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:42.655844Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-02-19T08:04:42.655939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for 
txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:04:42.655973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1968:3818] TestWaitNotification: OK eventTxId 105 2026-02-19T08:04:42.656510Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-02-19T08:04:42.656764Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-02-19T08:04:42.657544Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:42.657722Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 186us result status StatusSuccess 2026-02-19T08:04:42.658066Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:04:40.757409Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:04:40.854681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:04:40.868333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:04:40.868415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:04:40.868897Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:04:41.093533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:04:41.093619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:04:41.146069Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488278764454 != 1771488278764458 2026-02-19T08:04:41.152259Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.196083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:04:41.284241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:04:41.567907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:04:41.580877Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:04:41.683690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:42.032131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1076:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.032235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1086:2907], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.032334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.033216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1090:2910], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.033423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.037877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:04:42.149910Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1091:2911], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:04:42.215963Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1147:2948] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:04:42.477737Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2118: SelfId: [1:1178:2969], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01khtew1de9tvf0rrcpyafr7r3. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YzQ5ZDBiYTktYmNjMzBiNWYtNWYxY2MzNWQtNmFmOTA1MjU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2026-02-19T08:04:42.479547Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [1:1178:2969], TxId: 281474976715660, task: 1. Ctx: { TraceId : 01khtew1de9tvf0rrcpyafr7r3. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YzQ5ZDBiYTktYmNjMzBiNWYtNWYxY2MzNWQtNmFmOTA1MjU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2026-02-19T08:04:42.483035Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [1:1179:2970], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtew1de9tvf0rrcpyafr7r3. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YzQ5ZDBiYTktYmNjMzBiNWYtNWYxY2MzNWQtNmFmOTA1MjU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2026-02-19T08:04:42.488034Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YzQ5ZDBiYTktYmNjMzBiNWYtNWYxY2MzNWQtNmFmOTA1MjU=, ActorId: [1:1074:2900], ActorState: ExecuteState, LegacyTraceId: 01khtew1de9tvf0rrcpyafr7r3, Create QueryResponse for error on request, msg: status# INTERNAL_ERROR issues# { message: "Read from column index 1: got NULL from NOT NULL column" issue_code: 2012 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } trace_id# >> ColumnBuildTest::DoNotRestoreDeletedRows [GOOD] >> ColumnBuildTest::DisabledAndEnabledFlag >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:04:40.733799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:04:40.821399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:04:40.835399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:04:40.835464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:04:40.835836Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:04:41.075992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:04:41.076094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:04:41.140764Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488278493023 != 1771488278493027 2026-02-19T08:04:41.152123Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.194658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:04:41.275555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:04:41.553149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:04:41.565975Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:04:41.668242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:42.044766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1076:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.044896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1086:2907], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.044990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.045976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1090:2910], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.046194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:42.051106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:04:42.169990Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1091:2911], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:04:42.243067Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1147:2948] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst |88.6%| [TM] {RESULT} ydb/core/blobstorage/ut_mirror3of4/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2026-02-19T08:04:40.472665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:40.472705Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:40.635092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:04:41.164657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2026-02-19T08:04:41.399311Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:04:41.399830Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/000536/r3tmp/tmp2Hi5O2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:04:41.400461Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/000536/r3tmp/tmp2Hi5O2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/000536/r3tmp/tmp2Hi5O2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9152604011632714184 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:04:42.335431Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:42.335530Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:42.416844Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:04:42.943197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2026-02-19T08:04:43.166923Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:04:43.167319Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/000536/r3tmp/tmpz1kykz/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:04:43.167550Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/000536/r3tmp/tmpz1kykz/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/000536/r3tmp/tmpz1kykz/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11293207478755948617 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:04:43.378485Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp:353) 2026-02-19T08:04:43.651522Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) >> ColumnBuildTest::Unlocking_Failed [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> Splitter::Simple >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> Splitter::Simple [GOOD] >> Splitter::Small [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> Splitter::Minimal [GOOD] >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> 
TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] |88.6%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_Failed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.973657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.973754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.973811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.973848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.973878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.973912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.973950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.973981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:04:37.974054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.974123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.974882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.975125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:38.035346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:38.035394Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:38.046831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:38.047463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:38.047596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:38.056047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:38.056685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:38.057253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:38.057519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:38.060073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.060196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:38.061034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:38.061080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.061153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:38.061184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:38.061263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:38.061405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.167958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.168601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.168673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.168731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.168805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.168839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.168895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.168933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.169011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.169052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.169086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.169119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.169154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.169202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
4.887170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976725761, at schemeshard: 72075186233409549 2026-02-19T08:04:44.887211Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-02-19T08:04:44.887232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976725761 2026-02-19T08:04:44.887255Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976725761 2026-02-19T08:04:44.887304Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1667:3535], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2026-02-19T08:04:44.888173Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking 2026-02-19T08:04:44.888236Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1667:3535], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, 
ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:44.888264Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-02-19T08:04:44.889155Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected 2026-02-19T08:04:44.889237Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1667:3535], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:44.889260Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-02-19T08:04:44.889339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:04:44.889367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:1683:3551] TestWaitNotification: OK eventTxId 106 2026-02-19T08:04:44.889796Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-02-19T08:04:44.890031Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 
72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-02-19T08:04:44.890466Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:44.890597Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 144us result status StatusSuccess 2026-02-19T08:04:44.890869Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 
MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] |88.7%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> DescribeSchemaSecretsService::GetNewValue [GOOD] >> DescribeSchemaSecretsService::GetUpdatedValue >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::Drop >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true |88.6%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent |88.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.972151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.972217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.972265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.972291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.972319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.972357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.972388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.972408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:37.972502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.972554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.973178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.973421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:38.066031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:38.066104Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:38.078857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:38.079506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:38.079687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:38.090805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:38.091524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:38.092258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:38.092588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:38.095659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.095822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:38.096957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:38.097022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.097146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:38.097198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:38.097293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:38.097502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.245010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.248528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.248675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.248779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.248903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.248970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.249066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.249157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.249248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.249317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.249374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.249436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.249507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.249590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
BUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-02-19T08:04:45.614074Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725761:0 progress is 1/1 2026-02-19T08:04:45.614113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-02-19T08:04:45.614157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2026-02-19T08:04:45.614227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1889:3751] message: TxId: 281474976725761 2026-02-19T08:04:45.614287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-02-19T08:04:45.614335Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2026-02-19T08:04:45.614378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976725761:0 2026-02-19T08:04:45.614451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-02-19T08:04:45.616864Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-02-19T08:04:45.616942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976725761 2026-02-19T08:04:45.617022Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976725761 2026-02-19T08:04:45.617174Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2026-02-19T08:04:45.618676Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-02-19T08:04:45.618801Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:45.618862Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:04:45.620215Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-02-19T08:04:45.620336Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:45.620380Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-02-19T08:04:45.620508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:04:45.620557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1949:3799] TestWaitNotification: OK eventTxId 105 2026-02-19T08:04:45.621179Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-02-19T08:04:45.621550Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 
105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2026-02-19T08:04:45.622285Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:45.622525Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 262us result status StatusSuccess 2026-02-19T08:04:45.623033Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.838371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.838466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.838537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.838579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.838614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.838669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.838701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.838749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:37.838831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.838900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.839766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.840103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:37.930498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:37.930587Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:37.942485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:37.943157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:37.943335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:37.954907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:37.955523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:37.956261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:37.956580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:37.959480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:37.959669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:37.960821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:37.960894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:37.961007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:37.961062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:37.961157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:37.961363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.113522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.114573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.114689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.114813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.114935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.115008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.115095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.115183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.115293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:04:38.115365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.115430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.115497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.115570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.115649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730757:0, at schemeshard: 72075186233409549 2026-02-19T08:04:45.898384Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72075186233409549] TDone opId# 281474976730757:0 ProgressState 2026-02-19T08:04:45.898451Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-02-19T08:04:45.898476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-02-19T08:04:45.898501Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-02-19T08:04:45.898528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-02-19T08:04:45.898554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-02-19T08:04:45.898594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2606:4364] message: TxId: 281474976730757 2026-02-19T08:04:45.898634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-02-19T08:04:45.898662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-02-19T08:04:45.898695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730757:0 2026-02-19T08:04:45.898733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2026-02-19T08:04:45.900679Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-02-19T08:04:45.900721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730757 2026-02-19T08:04:45.900763Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730757 2026-02-19T08:04:45.900856Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, 
UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-02-19T08:04:45.902226Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2026-02-19T08:04:45.902335Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:45.902392Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-02-19T08:04:45.903558Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2026-02-19T08:04:45.903643Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:45.903671Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-02-19T08:04:45.903753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:04:45.903786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2676:4422] TestWaitNotification: OK eventTxId 106 2026-02-19T08:04:45.904310Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-02-19T08:04:45.904550Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-02-19T08:04:45.905123Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:45.905301Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 186us result status StatusSuccess 2026-02-19T08:04:45.905654Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:38.150597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:38.150683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:38.150750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:38.150786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:38.150836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:38.150871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:38.150906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-02-19T08:04:38.150939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:38.151036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:38.151107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:38.151930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:38.152245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:38.241805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:38.241886Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:38.253791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:38.254457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:38.254622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:38.266247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:38.266950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:38.267703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:38.267982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:38.271290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.271503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:38.272722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:38.272783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.272897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:38.272947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:38.273047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:38.273225Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.393561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.394959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.395004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730758:0, at schemeshard: 72075186233409549 2026-02-19T08:04:46.310283Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72075186233409549] TDone opId# 281474976730758:0 ProgressState 2026-02-19T08:04:46.310338Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-02-19T08:04:46.310358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-02-19T08:04:46.310385Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-02-19T08:04:46.310403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-02-19T08:04:46.310423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-02-19T08:04:46.310464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2469:4227] message: TxId: 281474976730758 2026-02-19T08:04:46.310501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-02-19T08:04:46.310530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-02-19T08:04:46.310554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730758:0 2026-02-19T08:04:46.310608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2026-02-19T08:04:46.312727Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-02-19T08:04:46.312773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730758 2026-02-19T08:04:46.312821Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730758 2026-02-19T08:04:46.312910Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, 
UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-02-19T08:04:46.314412Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2026-02-19T08:04:46.314501Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:46.314536Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-02-19T08:04:46.315514Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2026-02-19T08:04:46.315601Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:46.315630Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-02-19T08:04:46.315714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:04:46.315751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2540:4286] TestWaitNotification: OK eventTxId 106 2026-02-19T08:04:46.316195Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-02-19T08:04:46.316416Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-02-19T08:04:46.316912Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:46.317074Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 178us result status StatusSuccess 2026-02-19T08:04:46.317382Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:37.988099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:37.988191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.988254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:37.988288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:37.988343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:37.988378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:37.988404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:37.988432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:37.988547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:37.988612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:37.989402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:37.989672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:38.077951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:38.078018Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:38.089475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:38.090059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:38.090214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:38.101042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:38.101679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:38.102372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:38.102655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:38.105492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.105647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:38.106733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:38.106798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:38.106959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:38.107011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:38.107116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:38.107313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:38.258879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.259806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.259911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:38.260807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
97Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-02-19T08:04:46.280935Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730761:0 progress is 1/1 2026-02-19T08:04:46.280964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-02-19T08:04:46.280995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730761, ready parts: 1/1, is published: true 2026-02-19T08:04:46.281046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1677:3542] message: TxId: 281474976730761 2026-02-19T08:04:46.281081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-02-19T08:04:46.281107Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730761:0 2026-02-19T08:04:46.281130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730761:0 2026-02-19T08:04:46.281187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-02-19T08:04:46.283020Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730761 2026-02-19T08:04:46.283078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730761 2026-02-19T08:04:46.283128Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730761 2026-02-19T08:04:46.283216Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1740:3592], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730761 2026-02-19T08:04:46.284444Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-02-19T08:04:46.284550Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1740:3592], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:46.284586Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:04:46.285645Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-02-19T08:04:46.285741Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1740:3592], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:04:46.285790Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-02-19T08:04:46.285882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:04:46.285917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1757:3608] TestWaitNotification: OK eventTxId 105 2026-02-19T08:04:46.286397Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-02-19T08:04:46.286672Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: 
TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2026-02-19T08:04:46.287231Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:04:46.287415Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 214us result status StatusSuccess 2026-02-19T08:04:46.287751Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 
MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase >> DataShardDiskQuotas::DiskQuotaExceeded [GOOD] >> DataShardDiskQuotas::ShardRestartOnCreateTable >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> 
TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.478218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.478289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.478348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.478395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.478430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.478475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.478503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.478540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:41.478617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.478684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.479439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.479700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.538891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.538926Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.546196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.546663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.546806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.553322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.553798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.554227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.554379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.556389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.556531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.557357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.557394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.557459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.557485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.557510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:41.557617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.656333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.656944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.657537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
cookie: 0:108 msg type: 269090816 2026-02-19T08:04:48.443100Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000041 2026-02-19T08:04:48.443698Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:48.443790Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 30064773232 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:04:48.443840Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-02-19T08:04:48.444019Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 108:0 Propose -> Done 2026-02-19T08:04:48.444075Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-02-19T08:04:48.444238Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-02-19T08:04:48.444291Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:04:48.444348Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:425: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:04:48.445775Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:04:48.446366Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:04:48.447247Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:48.447278Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:48.447381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:04:48.447442Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:48.447468Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at 
schemeshard: 72057594046678944, txId: 108, path id: 1 2026-02-19T08:04:48.447499Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 108, path id: 41 FAKE_COORDINATOR: Erasing txId 108 2026-02-19T08:04:48.447557Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:04:48.447585Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 108:0 ProgressState 2026-02-19T08:04:48.447687Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:04:48.447719Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:04:48.447753Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:04:48.447782Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:04:48.447818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2026-02-19T08:04:48.447855Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:04:48.447889Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-02-19T08:04:48.447919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 108:0 2026-02-19T08:04:48.447972Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-02-19T08:04:48.448010Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 2, subscribers: 0 2026-02-19T08:04:48.448041Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:04:48.448065Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2026-02-19T08:04:48.448959Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:04:48.449018Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:04:48.449045Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:04:48.449085Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:04:48.449124Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-02-19T08:04:48.449878Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:04:48.449934Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:04:48.449956Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:04:48.449977Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2026-02-19T08:04:48.449998Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:04:48.450055Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-02-19T08:04:48.451471Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:04:48.452030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-02-19T08:04:48.452220Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-02-19T08:04:48.452260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-02-19T08:04:48.452588Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-02-19T08:04:48.452656Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:04:48.452690Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:900:2869] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2026-02-19T08:04:48.454485Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "extSubdomain" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:04:48.454635Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_extsubdomain.cpp:58: TCreateExtSubDomain Propose, path/MyRoot/extSubdomain, opId: 109:0, at schemeshard: 72057594046678944 2026-02-19T08:04:48.454740Z 
node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:04:48.455987Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/extSubdomain\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges)" TxId: 109 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 108, at schemeshard: 72057594046678944 2026-02-19T08:04:48.456162Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), operation: CREATE DATABASE, path: /MyRoot/extSubdomain TestModificationResult got TxId: 109, wait until txId: 109 >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.234508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.234594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.234658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.234709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.234748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.234782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.234814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.234840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:41.234915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.235000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.235877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.236158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.326887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.326960Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.339055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.339682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.339895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.351393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.352072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.352815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.353066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.356026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.356198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.357382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.357440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.357549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.357593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.357635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:41.357816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.470609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:04:41.471688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.471987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:04:49.121595Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:04:49.122538Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:04:49.122921Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-02-19T08:04:49.123017Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-02-19T08:04:49.123134Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:04:49.123332Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:04:49.123674Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-02-19T08:04:49.123763Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:49.123940Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:04:49.124170Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2026-02-19T08:04:49.124238Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:04:49.124334Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:04:49.124740Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:04:49.124843Z node 6 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:04:49.125068Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:04:49.125212Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:04:49.125250Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.125342Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:04:49.125493Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:04:49.125538Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.125612Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:04:49.127393Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:04:49.127445Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2026-02-19T08:04:49.127507Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:04:49.127523Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:04:49.127784Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:04:49.127808Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2026-02-19T08:04:49.128669Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:04:49.128715Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2026-02-19T08:04:49.128859Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:04:49.128923Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:04:49.129142Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for 
txId 103: send EvNotifyTxCompletion 2026-02-19T08:04:49.129174Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:04:49.129493Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:04:49.129562Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:04:49.129614Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:1042:2981] TestWaitNotification: OK eventTxId 103 2026-02-19T08:04:49.130083Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:04:49.130223Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 165us result status StatusPathDoesNotExist 2026-02-19T08:04:49.130334Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:04:49.130640Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:04:49.130760Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 133us result status StatusSuccess 2026-02-19T08:04:49.131082Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:04:49.131666Z node 6 :HIVE INFO: tablet_helpers.cpp:1586: [72057594037968897] TEvRequestHiveInfo, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.238415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.238483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.238542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.238576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.238603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.238636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.238656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.238676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:41.238729Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.238796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.239502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.239708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.322710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.322756Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.334135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.334709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.334908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.344442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.345029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.345535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.345751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.348033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.348177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.349141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.349195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.349279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.349325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.349358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:41.349491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.498777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.499632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.499766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.499875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.500787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
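
The Increment/DecrementPathDbRefCount lines throughout these logs follow one pattern: every reason keeping a path alive (a pending publication, an unfinished tx state, a not-yet-deleted shard) bumps a per-path counter, and cleanup of a dropped path proceeds only after that counter drains to zero. The following is an illustrative sketch under those assumptions, with invented names rather than the real schemeshard code.

    // Sketch of the per-path ref-count pattern: "was N" mirrors the log,
    // printing the value before each change, and a path becomes eligible
    // for removal once its counter reaches zero.
    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <unordered_map>

    class PathRefCounts {
    public:
        void Increment(uint64_t pathId, const std::string& reason) {
            std::cout << "Increment reason " << reason << " for path " << pathId
                      << " was " << Counts_[pathId]++ << "\n";
        }

        // Returns true when nothing references the path any more.
        bool Decrement(uint64_t pathId, const std::string& reason) {
            std::cout << "Decrement reason " << reason << " for path " << pathId
                      << " was " << Counts_[pathId] << "\n";
            return --Counts_[pathId] == 0;
        }

    private:
        std::unordered_map<uint64_t, uint32_t> Counts_;
    };

    int main() {
        PathRefCounts refs;
        refs.Increment(38, "publish path");
        refs.Increment(38, "shard created");
        refs.Decrement(38, "remove publishing");
        if (refs.Decrement(38, "shard deleted")) {
            // analogous to the point where TTxCleanDroppedPaths may act
            std::cout << "path 38 can now be removed by cleanup\n";
        }
        return 0;
    }
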
04:49.114365Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:04:49.114390Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:04:49.114406Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:04:49.114561Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.114603Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:04:49.114696Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:04:49.114729Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.114763Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:04:49.114790Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.114822Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:04:49.114855Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.114887Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:04:49.114921Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:04:49.115055Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:04:49.115752Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:04:49.116150Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2026-02-19T08:04:49.116487Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:04:49.117493Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-02-19T08:04:49.117620Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:04:49.117851Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:04:49.118445Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-02-19T08:04:49.118534Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:49.118681Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 Forgetting tablet 72075186233409546 2026-02-19T08:04:49.119139Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7042: Transaction 281474976720657 reset current state at schemeshard 72075186233409547 because pipe to tablet 72075186233409548 disconnected 2026-02-19T08:04:49.119181Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Pipe attached message is found and resent into the new pipe, opId:281474976720657:4294967295, dst tableId: 72075186233409548, msg type: 269090816, msg cookie: 0:281474976720657, at schemeshardId: 72075186233409547 2026-02-19T08:04:49.120050Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-02-19T08:04:49.120169Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:04:49.120282Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409549 2026-02-19T08:04:49.120859Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:04:49.120962Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 Forgetting tablet 72075186233409547 2026-02-19T08:04:49.121892Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:49.126537Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:04:49.126592Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 
72057594046678944 2026-02-19T08:04:49.126692Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:04:49.127232Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:04:49.127283Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.127349Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:04:49.129653Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:04:49.129732Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:04:49.130093Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:04:49.130114Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:04:49.130155Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:04:49.130167Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:04:49.130209Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:04:49.130233Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:04:49.130549Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:04:49.130627Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:04:49.130833Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:04:49.130868Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:04:49.131187Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:04:49.131264Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:04:49.131294Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:1026:2965] TestWaitNotification: OK eventTxId 103 2026-02-19T08:04:49.131657Z 
node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:04:49.131793Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 158us result status StatusPathDoesNotExist 2026-02-19T08:04:49.131914Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.343583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.343658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.343715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.343749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.343779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.343811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.343839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.343867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:41.343949Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.344022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.344826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.345076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.430259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.430314Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.441114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.441531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.441733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.452584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.453229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.453902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.454160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.456888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.457069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.458197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.458251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.458345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.458388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.458425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:41.458590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.589832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.590667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.590779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.590892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.590961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.591332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
teAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:04:49.586647Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:04:49.586678Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:04:49.586713Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:04:49.586753Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:04:49.586820Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:04:49.588056Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-02-19T08:04:49.588084Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-02-19T08:04:49.588156Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-02-19T08:04:49.588176Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:1008:2943], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-02-19T08:04:49.588266Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6306: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186233409546 Generation: 3 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-02-19T08:04:49.588317Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.588395Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 3, ActorId:[8:955:2902], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 
72057594046678944 2026-02-19T08:04:49.589141Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72075186233409546, cookie: 0 2026-02-19T08:04:49.589183Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.589219Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:04:49.589317Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:04:49.589350Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.589382Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:04:49.589412Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.589443Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:04:49.589493Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.589519Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:04:49.589546Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:04:49.589602Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:04:49.590362Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:04:49.590413Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:04:49.591259Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:04:49.591296Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:04:49.591643Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:04:49.591714Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:04:49.591746Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:1108:3039] TestWaitNotification: OK eventTxId 103 2026-02-19T08:04:49.592140Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:04:49.592271Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 147us result status StatusSuccess 2026-02-19T08:04:49.592561Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 5 ShardsInside: 3 ShardsLimit: 7 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 5 MaxChildrenInDir: 3 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 7 MaxShardsInPath: 3 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:04:49.593038Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-02-19T08:04:49.593158Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 131us result status StatusSuccess 2026-02-19T08:04:49.593418Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 3 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 
250 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 30 PathsLimit: 10 ShardsInside: 3 ShardsLimit: 10 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10 MaxChildrenInDir: 10 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 10 MaxShardsInPath: 10 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList >> DescribeSchemaSecretsService::GetUpdatedValue [GOOD] >> DescribeSchemaSecretsService::GetUnexistingValue >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.208528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.208583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.208632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.208660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.208688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.208714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 
10000 2026-02-19T08:04:41.208736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.208755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:41.208811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.208862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.209487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.209685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.278399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.278457Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.289677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.290293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.290487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.300635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.301211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.301834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.302063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.304463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.304617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.305515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.305569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.305653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.305685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.305735Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:41.305938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.411226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412190Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.412524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
de 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> DeleteExternalShards 2026-02-19T08:04:49.697007Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:04:49.698029Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:04:49.698767Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.698824Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:04:49.698925Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 DeleteExternalShards -> DeletePrivateShards 2026-02-19T08:04:49.699161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:04:49.699227Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:04:49.700271Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:49.700301Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:49.700389Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:04:49.700526Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:49.700564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:04:49.700594Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:04:49.700634Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.700667Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 102:0 ProgressState 2026-02-19T08:04:49.700709Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 DeletePrivateShards -> Done 2026-02-19T08:04:49.701577Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 
2026-02-19T08:04:49.701649Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:04:49.701672Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:04:49.701695Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:04:49.701742Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:04:49.702387Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:04:49.702444Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:04:49.702463Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:04:49.702485Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:04:49.702506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:04:49.702550Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:04:49.703579Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.703622Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:04:49.703743Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:04:49.703792Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:04:49.703834Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:04:49.703866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:04:49.703906Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:04:49.703942Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 
2026-02-19T08:04:49.703981Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:04:49.704008Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:04:49.704062Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:04:49.704405Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:04:49.704460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.704534Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:04:49.704768Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:04:49.704808Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.704889Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:04:49.705250Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:04:49.706021Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:04:49.707242Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:04:49.707310Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:04:49.707486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:04:49.707528Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:04:49.707826Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:04:49.707899Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:04:49.707940Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:758:2747] TestWaitNotification: OK eventTxId 102 2026-02-19T08:04:49.708314Z node 7 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:04:49.708464Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 174us result status StatusPathDoesNotExist 2026-02-19T08:04:49.708598Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.230394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.230475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.230529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.230556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.230590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.230619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.230641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.230663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:41.230714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.230779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.231376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.231609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.304698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.304746Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.312944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.313513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.313702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.323301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.323901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.324445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.324679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.327205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.327336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.328262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.328313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.328393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.328427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.328471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-02-19T08:04:41.328637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.436038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.436725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.436822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.436897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.436988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.437474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
n: 18446744073709551615 2026-02-19T08:04:49.886164Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-02-19T08:04:49.886205Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:04:49.887185Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2026-02-19T08:04:49.887270Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:04:49.887311Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:04:49.887334Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:04:49.887354Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:04:49.887562Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.887599Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:04:49.887710Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:04:49.887747Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.887784Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:04:49.887812Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.887848Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:04:49.887896Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.887938Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:04:49.887969Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:04:49.888100Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:04:49.889104Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-02-19T08:04:49.889336Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 
TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-02-19T08:04:49.889432Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:04:49.889643Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 Forgetting tablet 72075186233409546 2026-02-19T08:04:49.890502Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:49.890671Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:04:49.890834Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-02-19T08:04:49.890925Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:04:49.891076Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:04:49.891704Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2026-02-19T08:04:49.891778Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:04:49.891900Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:04:49.892328Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:04:49.892377Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.892482Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:04:49.893125Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2026-02-19T08:04:49.893177Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.893245Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:04:49.893531Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:04:49.893583Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:04:49.896199Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:04:49.896260Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2026-02-19T08:04:49.896349Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:04:49.896369Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:04:49.896433Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:04:49.896448Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2026-02-19T08:04:49.896593Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:04:49.896624Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2026-02-19T08:04:49.896790Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:04:49.896878Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:04:49.897125Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:04:49.897163Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:04:49.897490Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:04:49.897561Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:04:49.897600Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:1037:2977] TestWaitNotification: OK eventTxId 103 2026-02-19T08:04:49.898038Z node 8 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:04:49.898183Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 164us result status StatusPathDoesNotExist 2026-02-19T08:04:49.898297Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.089282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.089375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.089454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.089507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.089542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.089581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.089612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.089642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:04:41.089725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.089810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.090664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.090957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.181381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.181445Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.193451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.194111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.194315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.205629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.206329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.207098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.207352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.210325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.210509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.211732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.211791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.211909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.211960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.212002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:41.212169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.363365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.364399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.364548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.364658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.364805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.364881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.364953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.365033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.365117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.365208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.365346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.365430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.365499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.365564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
eave, operationId 103:0, at tablet# 72057594046678944 2026-02-19T08:04:49.714921Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.715063Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:04:49.716189Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-02-19T08:04:49.716288Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-02-19T08:04:49.716583Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:49.716697Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740528 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:04:49.716745Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2026-02-19T08:04:49.716966Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 Propose -> Done 2026-02-19T08:04:49.717010Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2026-02-19T08:04:49.717114Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:04:49.717238Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:588: Send TEvUpdateTenantSchemeShard, to actor: [8:803:2772], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2026-02-19T08:04:49.718408Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6346: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2026-02-19T08:04:49.718498Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2026-02-19T08:04:49.718640Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: 
Erasing txId 103 2026-02-19T08:04:49.718841Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:49.718874Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:04:49.719001Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:49.719036Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:04:49.719237Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.719281Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2026-02-19T08:04:49.719315Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 Done -> Done 2026-02-19T08:04:49.719863Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:04:49.719934Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:04:49.719967Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:04:49.720000Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-02-19T08:04:49.720035Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-02-19T08:04:49.720104Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:04:49.721032Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2026-02-19T08:04:49.721058Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2026-02-19T08:04:49.721132Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2026-02-19T08:04:49.721149Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:898:2840], at schemeshard: 72075186234409546, txId: 0, path id: 1 2026-02-19T08:04:49.721320Z node 8 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:6306: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-02-19T08:04:49.721374Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:49.721434Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:803:2772], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:04:49.722273Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72075186234409546, cookie: 0 2026-02-19T08:04:49.723027Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.723075Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:04:49.723207Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:04:49.723253Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.723295Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:04:49.723333Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.723378Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:04:49.723422Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:04:49.723464Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:04:49.723505Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:04:49.723578Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:04:49.724747Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:04:49.724828Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2026-02-19T08:04:49.725986Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:04:49.726037Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:04:49.726422Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:04:49.726514Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:04:49.726555Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:1026:2966] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.185594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.185663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.185724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.185755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.185800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.185828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.185848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.185866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, 
BatchMaxTime 0.100000s 2026-02-19T08:04:41.185915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.185961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.186542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.186741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.248835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.248901Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.260285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.260758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.260978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.272454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.273158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.273958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.274239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.277365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.277549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.278808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.278880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.278992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.279052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.279114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:41.279311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.396154Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.397166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.397299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.397399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.397539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.397627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.397767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.397842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.397916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.398003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.398068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.398160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.398242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.398312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ommon_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2026-02-19T08:04:49.829056Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ConfigureParts -> Propose 2026-02-19T08:04:49.830804Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.830952Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.830998Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.831054Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2026-02-19T08:04:49.831120Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2026-02-19T08:04:49.831254Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:04:49.832526Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-02-19T08:04:49.832697Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-02-19T08:04:49.833002Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:49.833114Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740528 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:04:49.833171Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2026-02-19T08:04:49.833503Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> Done 2026-02-19T08:04:49.833565Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2026-02-19T08:04:49.833692Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 38] was 6 2026-02-19T08:04:49.833835Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:771:2745], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:04:49.835405Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:49.835459Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:04:49.835651Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:49.835700Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:04:49.836074Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.836136Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2026-02-19T08:04:49.836195Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Done -> Done 2026-02-19T08:04:49.836770Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:04:49.836865Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:04:49.836918Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:04:49.836968Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:04:49.837008Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-02-19T08:04:49.837093Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:04:49.838853Z node 8 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:04:49.838896Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:04:49.838993Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:04:49.839028Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:04:49.839062Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:04:49.839097Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:04:49.839129Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:04:49.839201Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:713:2702] message: TxId: 102 2026-02-19T08:04:49.839251Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:04:49.839289Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:04:49.839322Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:04:49.839462Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:04:49.839778Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:04:49.840863Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:04:49.840913Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:964:2904] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-02-19T08:04:49.842916Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:04:49.843044Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2026-02-19T08:04:49.843080Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2026-02-19T08:04:49.843185Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 103:0, explain: Invalid 
AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2026-02-19T08:04:49.843230Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2026-02-19T08:04:49.844906Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:04:49.845142Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 |88.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> Splitter::Crit [GOOD] >> Splitter::CritSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.205909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.205979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.206042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.206074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.206095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.206117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.206136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.206154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:41.206195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.206234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.206789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.206982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.263223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.263267Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.272752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.273415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.273599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.282925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.283511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.284093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.284333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.287103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.287252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.288307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.288353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.288461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.288514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.288552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-02-19T08:04:41.288674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.410191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.411963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:04:50.682335Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:50.682376Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:04:50.682741Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6346: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2026-02-19T08:04:50.682837Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186234409546 2026-02-19T08:04:50.682994Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 2026-02-19T08:04:50.683343Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:04:50.683430Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:04:50.683471Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:04:50.683514Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-02-19T08:04:50.683555Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:04:50.683631Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-02-19T08:04:50.684924Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2026-02-19T08:04:50.684954Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2026-02-19T08:04:50.685041Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2026-02-19T08:04:50.685065Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:898:2840], at schemeshard: 72075186234409546, txId: 0, path 
id: 1 2026-02-19T08:04:50.685185Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6306: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 2 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-02-19T08:04:50.685245Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:04:50.685324Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:803:2772], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 2, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:04:50.685945Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186234409546, cookie: 0 2026-02-19T08:04:50.686141Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:04:50.686197Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:04:50.686401Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:04:50.686439Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:04:50.686768Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:04:50.686839Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:04:50.686874Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [8:1042:2982] TestWaitNotification: OK eventTxId 104 2026-02-19T08:04:50.687252Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:04:50.687399Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 
72057594046678944 describe path "/MyRoot/USER_0" took 167us result status StatusSuccess 2026-02-19T08:04:50.687749Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:04:50.688278Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2026-02-19T08:04:50.688427Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 172us result status StatusSuccess 2026-02-19T08:04:50.688788Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409546 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976720657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 |88.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> Splitter::CritSimple [GOOD] >> DescribeSchemaSecretsService::GetUnexistingValue [GOOD] >> DescribeSchemaSecretsService::GetDroppedValue >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964800;columns=1; >> DataShardDiskQuotas::ShardRestartOnCreateTable [GOOD] >> DataShardDiskQuotas::ShardRestartOnSplitDst |88.7%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:04:41.302973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:41.303047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.303118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:41.303156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:41.303192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:41.303235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:41.303258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:41.303284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:41.303341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:41.303405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:41.304154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:41.304402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:41.384095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:41.384158Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.393356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:41.393904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:41.394037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:41.400631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:41.401055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:41.401508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:41.401694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:41.403468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.403601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:41.404379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:41.404416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:41.404496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:41.404533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:41.404568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:41.404676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:41.487809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:41.488975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
03158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1181:3083], at schemeshard: 72075186233409546, txId: 116, path id: 1 2026-02-19T08:04:55.403205Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1181:3083], at schemeshard: 72075186233409546, txId: 116, path id: 39 2026-02-19T08:04:55.403709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2026-02-19T08:04:55.403765Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2026-02-19T08:04:55.404013Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-02-19T08:04:55.404486Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2026-02-19T08:04:55.404586Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2026-02-19T08:04:55.404624Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2026-02-19T08:04:55.404662Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 19 2026-02-19T08:04:55.404702Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 14 2026-02-19T08:04:55.405364Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2026-02-19T08:04:55.405432Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2026-02-19T08:04:55.405456Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2026-02-19T08:04:55.405476Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 39], version: 1 2026-02-19T08:04:55.405496Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 39] was 4 2026-02-19T08:04:55.405552Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2026-02-19T08:04:55.407256Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2026-02-19T08:04:55.407380Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2026-02-19T08:04:55.407428Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2026-02-19T08:04:55.407762Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-02-19T08:04:55.407945Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2026-02-19T08:04:55.408048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-02-19T08:04:55.408090Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2026-02-19T08:04:55.408188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-02-19T08:04:55.408239Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2026-02-19T08:04:55.408311Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:182: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-02-19T08:04:55.408400Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 116:0 CreateParts -> ConfigureParts 2026-02-19T08:04:55.409067Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72075186233409546, cookie: 116 2026-02-19T08:04:55.410226Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2026-02-19T08:04:55.411405Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2026-02-19T08:04:55.411598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2026-02-19T08:04:55.411662Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2026-02-19T08:04:55.411748Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2026-02-19T08:04:55.412124Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1126 RawX2: 30064774112 } TxBody: "\n\342\004\n\007Table11\020\'\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\366\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001b\031\010\000J\025\n\005\n\003hdd\022\005\n\003hdd\032\005\n\003hdd\212\001&\010\000\022\004\010\001\020\000\022\004\010\002\020\001\022\004\010\003\020\001\032\004\010\001\020\000\032\004\010\002\020\001\032\004\010\003\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\':\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2026-02-19T08:04:55.414502Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2026-02-19T08:04:55.414630Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 116, 
partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2026-02-19T08:04:55.435557Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2026-02-19T08:04:55.437003Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2026-02-19T08:04:55.437236Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> TListAllTopicsTests::RecursiveList [GOOD] >> TListAllTopicsTests::ListLimitAndPaging ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |88.7%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |88.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> DescribeSchemaSecretsService::GetDroppedValue [GOOD] >> DescribeSchemaSecretsService::GetInParallel |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] |88.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> TSentinelUnstableTests::BSControllerCantChangeStatus >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |88.8%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |88.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_disk_quotas/unittest >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:04:43.496837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:04:43.603720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:04:43.619957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:04:43.620024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:04:43.620456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:04:43.860180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:04:43.860278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:04:43.920672Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488281427553 != 1771488281427557 2026-02-19T08:04:43.932152Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:43.977657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:04:44.067012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) ... Setting hard disk quota to 1 byte 2026-02-19T08:04:44.322323Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:839:2732], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:04:44.322378Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:04:44.322403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:04:44.322470Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [1:836:2730], Recipient [1:404:2403]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:04:44.322488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:04:44.397800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } DatabaseQuotas { data_size_hard_quota: 1 } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:04:44.397969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:04:44.398144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:04:44.398179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:04:44.398330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:04:44.398382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:04:44.398451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:04:44.398927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:04:44.399028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:04:44.399085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:04:44.399112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-02-19T08:04:44.399289Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:04:44.399312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:04:44.399369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:04:44.399412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:04:44.399441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-02-19T08:04:44.399467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 CreateParts -> ConfigureParts 2026-02-19T08:04:44.399530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:04:44.399745Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [1:839:2732], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:04:44.399777Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:04:44.399807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:04:44.399965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:04:44.399993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-02-19T08:04:44.400074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:04:44.400098Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:04:44.400140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:04:44.400167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:04:44.400209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 ConfigureParts -> Propose 2026-02-19T08:04:44.400265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:04:44.400504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:04:44.400522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-02-19T08:04:44.400578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:04:44.400598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:04:44.400630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:04:44.400659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:04:44.400722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:04:44.400742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:04:44.400776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-02-19T08:04:44.403052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:04:44.403384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:04:44.403417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-02-19T08:04:44.403519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:04:44.403694Z 
node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269091328, Sender [1:400:2399], Recipient [1:404:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 281474976715657 2026-0 ... h:3216: StateWork, received event# 268829696, Sender [3:921:2782], Recipient [3:929:2788]: NKikimr::TEvTablet::TEvTabletDead 2026-02-19T08:05:04.599597Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:05:04.599708Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:05:04.600888Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435084, Sender [3:404:2403], Recipient [3:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:05:04.600928Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5494: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:05:04.600984Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:05:04.601026Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-02-19T08:05:04.601104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2026-02-19T08:05:04.602158Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [3:1249:3049], Recipient [3:404:2403]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:04.602190Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:04.602210Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:05:04.602448Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [3:927:2786], Recipient [3:404:2403]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037888 ClientId: [3:927:2786] ServerId: [3:932:2789] } 2026-02-19T08:05:04.602476Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:05:04.602510Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset 2026-02-19T08:05:04.602712Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:1 2026-02-19T08:05:04.602762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-02-19T08:05:04.602978Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [3:1541:3274], Recipient [3:404:2403]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [3:1541:3274] ServerId: [3:1542:3275] } 2026-02-19T08:05:04.603003Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:05:04.603023Z node 3 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset 2026-02-19T08:05:04.603199Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:05:04.603293Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-02-19T08:05:04.899079Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:404:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:05:04.899175Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:05:04.899266Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [3:404:2403], Recipient [3:404:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:05:04.899299Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... Inserting the 4th row 2026-02-19T08:05:05.094316Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 278003712, Sender [3:1573:3289], Recipient [3:1396:3161]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-02-19T08:05:05.094412Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037890 2026-02-19T08:05:05.094556Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435074, Sender [3:1396:3161], Recipient [3:1396:3161]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:05:05.094597Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3243: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:05:05.094708Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037890 2026-02-19T08:05:05.094858Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037890, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-02-19T08:05:05.094949Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:260: Table /Root/table2, shard: 72075186224037890, write point (Uint32 : 4) 2026-02-19T08:05:05.095007Z node 3 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:39:1] 2026-02-19T08:05:05.095110Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CheckWrite 2026-02-19T08:05:05.095174Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-02-19T08:05:05.095223Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 
72075186224037890 executing on unit CheckWrite 2026-02-19T08:05:05.095267Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-02-19T08:05:05.095321Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BuildAndWaitDependencies 2026-02-19T08:05:05.095365Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976715659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:05:05.095439Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037890 2026-02-19T08:05:05.095481Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-02-19T08:05:05.095513Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-02-19T08:05:05.095541Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BlockFailPoint 2026-02-19T08:05:05.095568Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BlockFailPoint 2026-02-19T08:05:05.095594Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-02-19T08:05:05.095619Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BlockFailPoint 2026-02-19T08:05:05.095644Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit ExecuteWrite 2026-02-19T08:05:05.095670Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit ExecuteWrite 2026-02-19T08:05:05.095714Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037890 2026-02-19T08:05:05.095771Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976715659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:05:05.095909Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037890, row count=1 2026-02-19T08:05:05.095973Z node 3 :TX_DATASHARD TRACE: execute_write_unit.cpp:110: add locks to result: 0 2026-02-19T08:05:05.096044Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is ExecutedNoMoreRestarts 2026-02-19T08:05:05.096086Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit ExecuteWrite 2026-02-19T08:05:05.096138Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit FinishProposeWrite 2026-02-19T08:05:05.096182Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit FinishProposeWrite 2026-02-19T08:05:05.096272Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is DelayCompleteNoMoreRestarts 2026-02-19T08:05:05.096306Z node 3 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit FinishProposeWrite 2026-02-19T08:05:05.096349Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit CompletedOperations 2026-02-19T08:05:05.096390Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CompletedOperations 2026-02-19T08:05:05.096437Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-02-19T08:05:05.096464Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CompletedOperations 2026-02-19T08:05:05.096499Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037890 has finished 2026-02-19T08:05:05.107533Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037890 2026-02-19T08:05:05.107629Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037890 on unit FinishProposeWrite 2026-02-19T08:05:05.107707Z node 3 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037890 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-02-19T08:05:05.107815Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |88.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_disk_quotas/unittest |88.8%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |88.8%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut >> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] |88.8%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |88.8%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |88.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun >> KqpExecuter::TestSuddenAbortAfterReady >> TSequence::CreateTableWithDefaultFromSequence >> 
TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> QueryActorTest::SimpleQuery >> DescribeSchemaSecretsService::GetInParallel [GOOD] >> DescribeSchemaSecretsService::GetSameValueMultipleTimes >> TestFederatedQueryHelpers::TestCheckNestingDepth [GOOD] >> TestFederatedQueryHelpers::TestTruncateIssues [GOOD] >> TestFederatedQueryHelpers::TestValidateResultSetColumns >> TGRpcRateLimiterTest::CreateResource >> TestFederatedQueryHelpers::TestValidateResultSetColumns [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/federated_query/ut/unittest >> TestFederatedQueryHelpers::TestValidateResultSetColumns [GOOD] |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> TListAllTopicsTests::ListLimitAndPaging [GOOD] |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |88.8%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |88.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |88.8%| [TM] {RESULT} ydb/core/kqp/federated_query/ut/unittest >> test_auditlog.py::test_single_dml_query_logged[update] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/list_topics/ut/unittest >> TListAllTopicsTests::ListLimitAndPaging [GOOD] Test command err: 2026-02-19T08:04:41.453992Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608484233626201642:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:04:41.454694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:04:41.473983Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:04:41.628179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:04:41.653291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:04:41.653395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:04:41.691330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:04:41.702201Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.703409Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608484233626201611:2081] 1771488281452445 != 1771488281452448 2026-02-19T08:04:41.780235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/ynks/000522/r3tmp/yandexje0ZbV.tmp 2026-02-19T08:04:41.780256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/000522/r3tmp/yandexje0ZbV.tmp 2026-02-19T08:04:41.781287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/000522/r3tmp/yandexje0ZbV.tmp 2026-02-19T08:04:41.781479Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:04:41.795916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:04:41.809802Z INFO: TTestServer started on Port 8535 GrpcPort 23102 PQClient connected to localhost:23102 2026-02-19T08:04:41.958081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:04:42.017760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:42.460780Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:04:43.927833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608484242216137240:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:43.927905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608484242216137250:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:43.927947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:43.928791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608484242216137256:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:43.928857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:43.931263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:04:43.939519Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608484242216137254:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:04:44.072704Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608484246511104617:2640] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:04:44.094084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.118719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.187827Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608484246511104625:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:04:44.189397Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YWFlNTE0OWItYjk0MGZjZDAtYmU5OTQwNDItM2NlZWIyMzI=, ActorId: [1:7608484242216137238:2333], ActorState: ExecuteState, LegacyTraceId: 01khtew38perypfvv7v8q3b0ae, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:04:44.191070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.191239Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608484246511104913:2818] 2026-02-19T08:04:46.454358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608484233626201642:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:04:46.454401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2026-02-19T08:04:50.048736Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:196: new Create topic request 2026-02-19T08:04:50.050201Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:196: new Create topic request 2026-02-19T08:04:50.115672Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608484272280908912:2929] connected; active server actors: 1 2026-02-19T08:04:50.116166Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:04:50.116248Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][topic1] updating configuration. Deleted partitions []. Added partitions [0] 2026-02-19T08:04:50.116549Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037895][] pipe [1:7608484272280908922:2937] connected; active server actors: 1 2026-02-19T08:04:50.116683Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][topic2] updating configuration. Deleted partitions []. Added partitions [0] 2026-02-19T08:04:50.116999Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:04:50.117173Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-02-19T08:04:50.117213Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:04:50.117239Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72075186224037892] doesn't have tx writes info 2026-02-19T08:04:50.117518Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][topic1] Discovered subdomain [OwnerId ... ateIdle] Process pending events. 
Count 1 2026-02-19T08:05:10.136189Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.136207Z node 3 :PERSQUEUE DEBUG: partition.cpp:2445: [72075186224037896][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-02-19T08:05:10.136254Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:05:10.136265Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.136279Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:05:10.136291Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.136318Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.136331Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:05:10.136343Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.136355Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:05:10.136365Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.136405Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72075186224037896][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-02-19T08:05:10.136838Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:05:10.136899Z node 3 :PERSQUEUE DEBUG: partition.cpp:1439: [72075186224037896][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1771488310179, TxId 281474976710677 2026-02-19T08:05:10.136917Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.136928Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:05:10.136938Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.136960Z node 3 :PERSQUEUE DEBUG: partition.cpp:2509: [72075186224037896][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-02-19T08:05:10.137093Z node 3 :PERSQUEUE DEBUG: partition.cpp:3984: [72075186224037896][Partition][0][StateIdle] Schedule reply tx done 281474976710677 2026-02-19T08:05:10.137107Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:05:10.137117Z node 3 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037896][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:05:10.137131Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.137296Z node 3 :PERSQUEUE DEBUG: read.h:275: [72075186224037896][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:05:10.138883Z node 3 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037896][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:05:10.139100Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72075186224037896][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:05:10.139166Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037896][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:05:10.139183Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.139194Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.139205Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.139217Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.139227Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.139244Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:05:10.139574Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72075186224037896] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles 
{ PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MonitoringProjectId: "" 2026-02-19T08:05:10.139628Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:05:10.154418Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.154452Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.154466Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.154484Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.154497Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.200000Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.200035Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.200050Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.200069Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.200082Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.236825Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.236857Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.236870Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.236887Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.236900Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.254967Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx events 2026-02-19T08:05:10.254995Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.255008Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.255024Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.255037Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.300620Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.300655Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.300669Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.300686Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.300702Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.337193Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.337225Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.337238Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.337255Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.337269Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:05:10.355122Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:05:10.355154Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.355168Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:05:10.355186Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:05:10.355202Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] |88.8%| [TM] {RESULT} ydb/core/persqueue/public/list_topics/ut/unittest >> test_auditlog.py::test_dml_begin_commit_logged >> test_disk.py::TestSafeDiskBreak::test_erase_method |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] 
>> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors >> QueryActorTest::SimpleQuery [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> QueryActorTest::Rollback >> listing_batching.py::TestListingBatching::test_listing_batching_solomon >> listing_paging.py::TestListingPaging::test_listing_paging_solomon >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> data_paging.py::TestDataPaging::test_data_paging_solomon |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit >> DescribeSchemaSecretsService::GetSameValueMultipleTimes [GOOD] >> DescribeSchemaSecretsService::FailWithoutGrants >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] >> 
test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] >> TSentinelUnstableTests::InsertOrAssignUpdatesRequestsDuringRetry ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:05:11.560235Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:05:11.654564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:05:11.680114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:05:11.680195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:05:11.680811Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:05:11.953044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:11.953177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:12.029497Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488308804407 != 1771488308804411 2026-02-19T08:05:12.042844Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:12.090818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:12.229704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:05:12.624604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:05:12.624669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:05:12.624707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:05:12.625178Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:05:12.637937Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:13.023724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:13.176567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:13.415384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:13.767012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:14.045325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:14.819404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1915:3519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:14.819744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:14.820808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1988:3538], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:14.820908Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:14.856803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:15.071521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:15.390350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:15.682674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:15.964471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:16.333378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:16.644747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:16.984988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:17.435410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:17.435530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:17.436055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2803:4183], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:17.436158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:17.436235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2806:4186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:17.443244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:05:17.666068Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2808:4188], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:05:17.733110Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2868:4229] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Got NKikimr::NGRpcService::TGrpcRequestCall> [1:835:2729] [1:8320808721877066593:7169396] Got NKikimr::NGRpcService::TEvGrpcMon::TEvReportPeer [0:6011864930757669447:2188150] [1:8320808721877066593:7169396] Got NActors::IEventHandle [1:3155:4465] [1:835:2729] Got NKikimr::NKqp::NPrivateEvents::TEvQueryRequest [1:8678280833929343339:121] [1:3155:4465] Got NKikimr::NKqp::NPrivateEvents::TEvQueryRequest [1:3154:4464] [1:67:2114] Got NKikimr::NKqp::NWorkload::TEvSubscribeOnPoolChanges [1:7742373267896299883:25708] [1:67:2114] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:67:2114] Got NActors::IEventHandle [1:3157:4467] [1:307:2350] Got NKikimr::NKqp::NWorkload::TEvPlaceRequestIntoPool [1:7742373267896299883:25708] [1:3154:4464] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:67:2114] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3157:4467] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:67:2114] Got NActors::IEventHandle [1:3158:4468] [1:307:2350] Got NActors::IEventHandle [1:3159:4469] [1:312:2355] Got NActors::IEventHandle [1:3160:4470] [1:3158:4468] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3157:4467] [1:3159:4469] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFetchPoolResponse [1:307:2350] [1:3157:4467] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3160:4470] Got NActors::IEventHandle [1:3161:4471] [1:312:2355] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvUpdatePoolSubscription [1:2876:4236] [1:8320808721877066593:7169396] Got NKikimr::NKqp::NWorkload::TEvUpdatePoolInfo [1:67:2114] [1:2876:4236] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3160:4470] [1:3161:4471] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFetchPoolResponse [1:3158:4468] [1:3160:4470] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvResolvePoolResponse [1:7742373267896299883:25708] [1:3158:4468] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvResolvePoolResponse [1:2876:4236] [1:3158:4468] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvPlaceRequestIntoPoolResponse [1:7742373267896299883:25708] [1:2876:4236] Got NKikimr::NKqp::NWorkload::TEvContinueRequest [1:3154:4464] [1:2876:4236] Got NKikimr::NKqp::NPrivateEvents::TEvCompileRequest [1:8101253777303040363:6646889] [1:3154:4464] Got NActors::IEventHandle [1:3162:4472] [1:303:2346] Got NActors::IEventHandle [1:3164:4474] [1:3162:4472] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3164:4474] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest [1:2897:4251] [1:312:2355] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest [1:2897:4251] [1:312:2355] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2898:4251] [1:2897:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2899:4251] [1:2897:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2900:4251] [1:2897:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest 
[1:2901:4251] [1:2898:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:24339059:0] [1:2901:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2903:4251] [1:2899:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2905:4251] [1:2900:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:1099535966835:0] [1:2903:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2199047594611:0] [1:2905:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2901:4251] [1:2:2049] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2903:4251] [1:5:2052] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2905:4251] [1:8:2055] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2898:4251] [1:2901:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2899:4251] [1:2903:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2900:4251] [1:2905:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2897:4251] [1:2898:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2897:4251] [1:2899:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse [1:312:2355] [1:2897:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2897:4251] [1:2900:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2898:4251] [1:2897:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2899:4251] [1:2897:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2900:4251] [1:2897:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2901:4251] [1:2898:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:24339059:0] [1:2901:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2903:4251] [1:2899:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:1099535966835:0] [1:2903:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2905:4251] [1:2900:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2199047594611:0] [1:2905:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2901:4251] [1:2:2049] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2903:4251] [1:5:2052] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2905:4251] [1:8:2055] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2898:4251] [1:2901:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2899:4251] [1:2903:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2900:4251] [1:2905:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2897:4251] [1:2898:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2897:4251] [1:2899:4251] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse [1:312:2355] [1:2897:4251] Got NActors::IEventHandle [1:3165:4475] [1:312:2355] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2897:4251] [1:2900:4251] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3164:4474] [1:3165:4475] Got NActors::IEventHandle [1:3166:4476] [1:3164:4474] Got NKikimr::NStat::TEvStatistics::TEvGetStatistics [1:8534995652929746003:6644585] [1:3166:4476] Got 
NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:78:2125] Got NActors::IEventHandle [1:3167:4477] [1:312:2355] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:78:2125] [1:3167:4477] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:78:2125] Got NActors::IEventHandle [1:3168:4478] [1:312:2355] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:78:2125] [1:3168:4478] Got NKikimr::NStat::TEvStatistics::TEvGetStatisticsResult [1:3166:4476] [1:78:2125] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3162:4472] [1:8320808721877066593:7169396] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3162:4472] [1:8320808721877066593:7169396] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3162:4472] [1:8320808721877066593:7169396] Got NKikimr::NKqp::NPrivateEvents::TEvCompileResponse [1:303:2346] [1:3162:4472] Got NKikimr::NKqp::NPrivateEvents::TEvCompileResponse [1:3154:4464] [1:303:2346] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3154:4464] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3169:4464] [1:69:2116] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3169:4464] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3169:4464] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3169:4464] 2026-02-19T08:05:20.676052Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:3169:4464] TxId: 281474976715673. Ctx: { TraceId: 01khtex6h4dqfgg9j8sy5tm9sd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YjA0MjIxOTItMTUzZDI5NDMtM2FkMWIwOTgtYjFhYzAzMjA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3169:4464] Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3169:4464] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3154:4464] [1:3169:4464] 2026-02-19T08:05:20.676392Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YjA0MjIxOTItMTUzZDI5NDMtM2FkMWIwOTgtYjFhYzAzMjA=, ActorId: [1:3154:4464], ActorState: ExecuteState, LegacyTraceId: 01khtex6h4dqfgg9j8sy5tm9sd, Create QueryResponse for error on request, msg: status# STATUS_CODE_UNSPECIFIED issues# trace_id# Got NKikimr::NKqp::NWorkload::TEvCleanupRequest [1:2876:4236] [1:3154:4464] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFinishRequestInPool [1:7742373267896299883:25708] [1:2876:4236] Got NKikimr::NSysView::TEvSysView::TEvCollectQueryStats [1:6014387330472966483:2188150] [1:3154:4464] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3154:4464] Got NActors::TEvents::TEvPoison [1:3169:4464] [1:3169:4464] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3169:4464] [1:308:2351] Got NKikimr::NKqp::NWorkload::TEvCleanupResponse [1:3154:4464] [1:2876:4236] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3170:4464] [1:69:2116] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3170:4464] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3170:4464] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3170:4464] 2026-02-19T08:05:20.677205Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:3170:4464] TxId: 281474976715674. 
Ctx: { TraceId: 01khtex6h4dqfgg9j8sy5tm9sd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YjA0MjIxOTItMTUzZDI5NDMtM2FkMWIwOTgtYjFhYzAzMjA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3170:4464] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3154:4464] [1:3170:4464] 2026-02-19T08:05:20.677389Z node 1 :KQP_SESSION ERROR: {KQPSA@kqp_session_actor.cpp:3414} SessionId: ydb://session/3?node_id=1&id=YjA0MjIxOTItMTUzZDI5NDMtM2FkMWIwOTgtYjFhYzAzMjA=, ActorId: [1:3154:4464], ActorState: CleanupState, LegacyTraceId: 01khtex6h4dqfgg9j8sy5tm9sd, Failed to cleanup issues# trace_id# Got NActors::TEvents::TEvPoison [1:3170:4464] [1:3170:4464] Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3170:4464] Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:67:2114] [1:3154:4464] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3170:4464] [1:308:2351] Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:3155:4465] [1:67:2114] Got NActors::TEvents::TEvPoison [1:3156:4466] [1:67:2114] >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |88.9%| [TM] {RESULT} ydb/core/kqp/executer_actor/ut/unittest |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |88.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] >> TSentinelUnstableTests::InsertOrAssignUpdatesRequestsDuringRetry [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::InsertOrAssignUpdatesRequestsDuringRetry [GOOD] Test command err: 2026-02-19T08:05:04.750576Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-02-19T08:05:04.750643Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-02-19T08:05:04.750702Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:05:04.750733Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-02-19T08:05:04.750776Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-02-19T08:05:04.750845Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-02-19T08:05:04.752086Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: 
"pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-02-19T08:05:04.757015Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 
GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
Response: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.204535Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.204694Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.204762Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:05:26.205243Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 10:42, status# ACTIVE, required status# FAULTY, reason# PrevState# InitialFormatReadError State# InitialFormatReadError StateCounter# 1 StateLimit# 1, dry run# 0 2026-02-19T08:05:26.205317Z node 9 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-02-19T08:05:26.205517Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:05:26.205552Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:05:26.205674Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-02-19T08:05:26.225825Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-02-19T08:05:26.225944Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 
2026-02-19T08:05:26.225976Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-02-19T08:05:26.226010Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-02-19T08:05:26.226039Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-02-19T08:05:26.226069Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-02-19T08:05:26.226100Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-02-19T08:05:26.226622Z node 9 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { }, cookie# 1 2026-02-19T08:05:26.226677Z node 9 :CMS ERROR: sentinel.cpp:1389: [Sentinel] [Main] Unsuccesful response from BSC: error# 2026-02-19T08:05:26.226960Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.227413Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.227567Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } 
ResponseTime: 120110 2026-02-19T08:05:26.227738Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.227870Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.228003Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.228127Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.228251Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:05:26.228312Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:05:26.228742Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 10:42, status# FAULTY, required status# INACTIVE, reason# PrevState# InitialFormatReadError State# Normal StateCounter# 1 current# FAULTY, dry run# 0 2026-02-19T08:05:26.228793Z node 9 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-02-19T08:05:26.229018Z node 9 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-02-19T08:05:26.229056Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:42 >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] >> data_paging.py::TestDataPaging::test_data_paging_solomon [GOOD] >> data_paging.py::TestDataPaging::test_listing_paging_monitoring |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |88.9%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest |88.9%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> DescribeSchemaSecretsService::FailWithoutGrants [GOOD] >> DescribeSchemaSecretsService::GroupGrants ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] Leader for TabletID 72057594046678944 is [1:244:2156] sender: [1:245:2060] recipient: [1:227:2145] 2026-02-19T08:04:39.458476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:39.458543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:39.458608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:39.458639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:39.458675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:39.458705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:39.458725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:39.458746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:39.458792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:39.458850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:39.459477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:39.459692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:39.518944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:39.518994Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:39.530467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:39.531080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:39.531266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:39.538910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:39.539114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:39.539868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:39.540156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:39.542533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:39.542703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:39.543922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:39.543986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:39.544163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:39.544211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:39.544260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:39.544456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:39.668917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.669564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.669634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.669682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.669759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.669950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.669989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.670034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.670088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.670139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.670407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.670440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.670481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.670529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... rts: 2/3 2026-02-19T08:05:33.466309Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:2 progress is 2/3 2026-02-19T08:05:33.466356Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2026-02-19T08:05:33.466399Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-02-19T08:05:33.466653Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:05:33.466693Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:05:33.466721Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2026-02-19T08:05:33.466793Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1231:3012] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2026-02-19T08:05:33.466893Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269552132, Sender [7:244:2156], Recipient [7:1231:3012]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2026-02-19T08:05:33.466927Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3220: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-02-19T08:05:33.466956Z node 7 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 104 datashard 72075186233409549 state Ready 2026-02-19T08:05:33.467003Z node 7 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186233409549 Got TEvSchemaChangedResult from SS at 72075186233409549 2026-02-19T08:05:33.467444Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:05:33.467486Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:05:33.467543Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:05:33.467595Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] 
TDone opId# 104:0 ProgressState 2026-02-19T08:05:33.467670Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:05:33.467695Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-02-19T08:05:33.467723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-02-19T08:05:33.467758Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-02-19T08:05:33.467784Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-02-19T08:05:33.467816Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-02-19T08:05:33.467880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:983:2801] message: TxId: 104 2026-02-19T08:05:33.467931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-02-19T08:05:33.467983Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:05:33.468019Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:05:33.468578Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2026-02-19T08:05:33.468661Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-02-19T08:05:33.468704Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:1 2026-02-19T08:05:33.468768Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-02-19T08:05:33.468812Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-02-19T08:05:33.468834Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:2 2026-02-19T08:05:33.468889Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2026-02-19T08:05:33.476244Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:05:33.476430Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:05:33.476535Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:983:2801] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2026-02-19T08:05:33.476746Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:05:33.476801Z 
node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1284:3051] 2026-02-19T08:05:33.477329Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [7:1286:3053], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:33.477375Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:33.477405Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-02-19T08:05:33.480679Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [8:957:2104], Recipient [7:244:2156] 2026-02-19T08:05:33.480759Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:05:33.483460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:05:33.483977Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:05:33.484055Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:05:33.484374Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:05:33.489011Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:05:33.489386Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2026-02-19T08:05:33.489467Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 
105 TestWaitNotification wait txId: 105 2026-02-19T08:05:33.490558Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-02-19T08:05:33.490619Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-02-19T08:05:33.491081Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [7:1359:3126], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:05:33.491147Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:05:33.491196Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046678944 2026-02-19T08:05:33.491376Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [7:983:2801], Recipient [7:244:2156]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2026-02-19T08:05:33.491421Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:05:33.491510Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-02-19T08:05:33.491649Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:05:33.491698Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1357:3124] 2026-02-19T08:05:33.491956Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [7:1359:3126], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:33.492019Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:33.492061Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumnAliases-default.txt] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:05:11.576504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:05:11.726795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:05:11.746673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:05:11.746762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:05:11.747233Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:05:12.042637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:12.042786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:12.120753Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488308912719 != 1771488308912723 2026-02-19T08:05:12.129866Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:12.178018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:12.305686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:05:12.626668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:12.641632Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:12.758246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:13.136569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:989:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:13.136726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:13.137233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:13.138031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1005:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:13.138310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:13.142967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:05:13.271157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1003:2845], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:05:13.350715Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1061:2884] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } ... waiting for SysViewsRoster update finished 2026-02-19T08:05:18.487648Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:05:18.564509Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:05:18.611044Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:05:18.611450Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:05:18.611515Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:05:18.997590Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:19.004685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:19.048341Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771488314882736 != 1771488314882740 2026-02-19T08:05:19.057205Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:19.106511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:19.188703Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:05:19.618951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:19.641945Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:19.780466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:20.248989Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1036:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:20.249101Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1047:2879], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:20.249170Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:20.258645Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1052:2884], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:20.258839Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:20.277621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:05:20.475282Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1050:2882], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:05:20.515802Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1109:2922] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } it ... ce.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:05:31.820331Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:31.834750Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:31.959706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:32.366311Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:989:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:32.366435Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:32.366513Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:32.367755Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1005:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:32.367954Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:32.372403Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:05:32.624055Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1003:2845], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:05:32.665307Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:1061:2884] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } ... waiting for SysViewsRoster update finished 2026-02-19T08:05:40.642608Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:05:40.675108Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:05:40.705046Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:05:40.705143Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:05:40.705423Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:320:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:05:41.029604Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:41.029759Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:41.057403Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1771488335291824 != 1771488335291828 2026-02-19T08:05:41.079432Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:41.138161Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:41.222473Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:05:41.604431Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:41.624649Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:41.758119Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:42.241684Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:989:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:42.241849Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:999:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:42.242315Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:42.243136Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1005:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:42.243364Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:42.261262Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:05:42.475950Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1003:2845], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:05:42.517493Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1061:2884] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:05:42.689176Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:1071:2893], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2026-02-19T08:05:42.692299Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=MmViOTVhMmUtOTVlNDdhMTAtMzBlYjJmYzctZWUwNjVkNzg=, ActorId: [4:986:2834], ActorState: ExecuteState, LegacyTraceId: 01khtexw6g9q3khnem8x8d239q, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } } tx_id# trace_id# 2026-02-19T08:05:42.755431Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:1093:2909], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2026-02-19T08:05:42.762453Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=Mjc0YTVkYzktMzQ0Y2E2MzktODlmNTA3YzQtMjRlOGQ3MmM=, ActorId: [4:1085:2901], ActorState: ExecuteState, LegacyTraceId: 01khtexwn8395np4r70jnjg9kh, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } } tx_id# trace_id# >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] >> DescribeSchemaSecretsService::GroupGrants [GOOD] >> DescribeSchemaSecretsService::BatchRequest >> QueryActorTest::StreamQuery [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2026-02-19T08:05:09.453965Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608484354961578475:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:05:09.454026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:05:09.774090Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:05:09.809686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:09.809838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:09.865022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:09.867765Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:09.869000Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608484354961578255:2081] 1771488309392968 != 1771488309392971 2026-02-19T08:05:10.080065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:05:10.175144Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:10.235042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:10.375612Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7608484359256546425:2541], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-02-19T08:05:10.390848Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:12.341910Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:05:12.354146Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: Request has 18444972585397.197505s seconds to be completed 2026-02-19T08:05:12.361666Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=1&id=MWE5ODA2N2YtZmNlYWFiZGItNDNmMDQ3NjgtZDZhMmU4YWE=, workerId: [1:7608484367846481074:2312], database: /dc-1, longSession: 1, local sessions count: 1 2026-02-19T08:05:12.361906Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 2026-02-19T08:05:12.362875Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:05:12.362932Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:05:12.362957Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:05:12.364722Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7608484359256546425:2541], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=MWE5ODA2N2YtZmNlYWFiZGItNDNmMDQ3NjgtZDZhMmU4YWE=, TxId: , text: SELECT 42 2026-02-19T08:05:12.365048Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=MWE5ODA2N2YtZmNlYWFiZGItNDNmMDQ3NjgtZDZhMmU4YWE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7608484367846481074:2312] 2026-02-19T08:05:12.365089Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7608484367846481076:2553] 2026-02-19T08:05:12.721659Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 3, sender: [1:7608484367846481075:2313], selfId: [1:7608484354961578495:2267], source: [1:7608484367846481074:2312] 2026-02-19T08:05:12.722534Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7608484359256546425:2541], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MWE5ODA2N2YtZmNlYWFiZGItNDNmMDQ3NjgtZDZhMmU4YWE=, TxId: 2026-02-19T08:05:12.722587Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7608484359256546425:2541], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MWE5ODA2N2YtZmNlYWFiZGItNDNmMDQ3NjgtZDZhMmU4YWE=, TxId: 2026-02-19T08:05:12.722960Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=1&id=MWE5ODA2N2YtZmNlYWFiZGItNDNmMDQ3NjgtZDZhMmU4YWE=, workerId: [1:7608484367846481074:2312], local sessions count: 0 2026-02-19T08:05:13.762439Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608484371097672664:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:05:13.762497Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:05:13.779932Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:05:13.842810Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:13.844300Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608484371097672634:2081] 1771488313760228 != 1771488313760231 2026-02-19T08:05:13.878479Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:13.878552Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:13.883029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:14.044606Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:05:14.064577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:14.071115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:14.103242Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7608484375392640799:2535], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-02-19T08:05:14.770532Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:16.971908Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:05:16.975299Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: Request has 18444972585392.576339s seconds to be completed 2026-02-19T08:05:16.977330Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=2&id=MjFhZjFhNzEtNzdjNmQwNWYtZmM5MDc4MTMtMjIxMjI0OGU=, workerId: [2:7608484383982575426:2311], database: /dc-1, longSession: 1, local sessions count: 1 2026-02-19T08:05:16.978174Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 2026-02-19T08:05:16.978225Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:05:16.978257Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:05:16.978275Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:05:16.980179Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7608484375392640799:2535], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjFhZjFhNzEtNzdjNmQwNWYtZmM5MDc4MTMtMjIxMjI0OGU=, TxId: , text: DECLARE $k As Uint64; DECLARE $v As String; UPSERT INTO TestTable (Key, Value) VALUES ($k, $v) 2026-02-19T08:05:16.980484Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=2&id=MjFhZjFhNzEtNzdjNmQwNWYtZmM5MDc4MTMtMjIxMjI0OGU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [2:7608484383982575426:2311] 2026-02-19T08:05:16.980515Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7608484383982575451:2548] 2026-02-19T08:05:17.127380Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 3, sender: [2:7608484383982575445:2314], selfId: [2:7608484371097672896:2270], source: [2:7608484383982575426:2311] 2026-02-19T08:05:17.127525Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7608484375392640799:2535], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjFhZjFhNzEtNzdjNmQwNWYtZmM5MDc4MTMtMjIxMjI0OGU=, TxId: 2026-02-19T08:05:17.127560Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7608484375392640799:2535], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjFhZjFhNzEtNzdjNmQwNWYtZmM5MDc4MTMtMjIxMjI0OGU=, TxId: 2026-02-19T08:05:17.127830Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=2&id=MjFhZjFhNzEtNzdjNmQwNWYtZmM5MDc4MTMtMjIxMjI0OGU=, workerId: [2:7608484383982575426:2311], local sessions count: 0 2026-02-19T08:05:17.129318Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7608484388277542777:2560], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-02-19T08:05:17.129493Z node 2 :KQP_PROXY DEBUG: ... 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:05:28.127271Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:28.137111Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:05:28.144715Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:05:28.250037Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484435265026055:2542], Bootstrap. 
Database: dc-1, IsSystemUser: 0, run create session 2026-02-19T08:05:28.254289Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:34.804700Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:05:34.809772Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: Request has 18444972585374.741882s seconds to be completed 2026-02-19T08:05:34.812283Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=4&id=MmRlMzgxZmUtM2E5NzJmNzctOGJlYTg5MWMtYmVkOGU5ZjA=, workerId: [4:7608484461034829880:2315], database: /dc-1, longSession: 1, local sessions count: 1 2026-02-19T08:05:34.812513Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 2026-02-19T08:05:34.812558Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:05:34.812581Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:05:34.812598Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:05:34.813605Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484435265026055:2542], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2026-02-19T08:05:34.813762Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484435265026055:2542], Start read next stream part 2026-02-19T08:05:34.821727Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtexmyx2bdrr35y6xhz0k56", Created new session, sessionId: ydb://session/3?node_id=4&id=MjM2YjI0YWMtZDlhZDA4MDQtZDRiYTUwOWUtYTc4M2RkMGY=, workerId: [4:7608484461034829907:2318], database: /dc-1, longSession: 0, local sessions count: 2 2026-02-19T08:05:34.822078Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtexmyx2bdrr35y6xhz0k56, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=MjM2YjI0YWMtZDlhZDA4MDQtZDRiYTUwOWUtYTc4M2RkMGY=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [4:7608484461034829907:2318] 2026-02-19T08:05:34.822109Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7608484461034829908:2565] 2026-02-19T08:05:34.823885Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608484461034829909:2319], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:34.823989Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:34.824413Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608484461034829921:2322], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:34.824451Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608484461034829922:2323], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:34.824567Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:05:34.828596Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:05:34.844331Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608484461034829925:2324], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:05:34.913226Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608484461034829977:2602] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:05:42.565864Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:05:42.565907Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:44.297766Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484435265026055:2542], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:05:44.300667Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:333: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484435265026055:2542], Cancel stream request 2026-02-19T08:05:44.300740Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484435265026055:2542], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MmRlMzgxZmUtM2E5NzJmNzctOGJlYTg5MWMtYmVkOGU5ZjA=, TxId: 2026-02-19T08:05:44.304541Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=4&id=MmRlMzgxZmUtM2E5NzJmNzctOGJlYTg5MWMtYmVkOGU5ZjA=, workerId: [4:7608484461034829880:2315], local sessions count: 1 2026-02-19T08:05:44.311542Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484503984503019:2648], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-02-19T08:05:44.311756Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: Request has 18444972585365.239876s seconds to be completed 2026-02-19T08:05:44.314124Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=4&id=ZWQ4MTc5NzgtODcyZjY3NGEtYTY3NTcyODgtZmY0YTMwZTY=, workerId: [4:7608484503984503022:2351], database: /dc-1, longSession: 1, local sessions count: 2 2026-02-19T08:05:44.314344Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 2026-02-19T08:05:44.315057Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484503984503019:2648], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2026-02-19T08:05:44.315153Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484503984503019:2648], Start read next stream part 2026-02-19T08:05:44.317013Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtexy7v9jcz4fkkwdxsasnr", Created new session, sessionId: ydb://session/3?node_id=4&id=YTg1ZTYzNWEtMTM1MTZlZGQtYmIwNTU1NTEtMjUyOWQzZjg=, workerId: [4:7608484503984503025:2352], database: /dc-1, longSession: 0, local sessions count: 3 2026-02-19T08:05:44.317259Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtexy7v9jcz4fkkwdxsasnr, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=YTg1ZTYzNWEtMTM1MTZlZGQtYmIwNTU1NTEtMjUyOWQzZjg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [4:7608484503984503025:2352] 2026-02-19T08:05:44.317286Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7608484503984503026:2650] 2026-02-19T08:05:44.451479Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771488344437, txId: 281474976710663] shutting down 2026-02-19T08:05:44.458820Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484503984503019:2648], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:05:44.464795Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484503984503019:2648], Start read next stream part 2026-02-19T08:05:44.464822Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtexy7v9jcz4fkkwdxsasnr", Forwarded response to sender actor, requestId: 5, sender: [4:7608484503984503023:2648], selfId: [4:7608484430970058032:2187], source: [4:7608484503984503025:2352] 2026-02-19T08:05:44.465016Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484503984503019:2648], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:05:44.465064Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7608484503984503019:2648], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZWQ4MTc5NzgtODcyZjY3NGEtYTY3NTcyODgtZmY0YTMwZTY=, TxId: 2026-02-19T08:05:44.465467Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=4&id=YTg1ZTYzNWEtMTM1MTZlZGQtYmIwNTU1NTEtMjUyOWQzZjg=, workerId: [4:7608484503984503025:2352], local sessions count: 2 2026-02-19T08:05:44.466551Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=4&id=ZWQ4MTc5NzgtODcyZjY3NGEtYTY3NTcyODgtZmY0YTMwZTY=, workerId: [4:7608484503984503022:2351], local sessions count: 1 >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] Leader for TabletID 72057594046678944 is [1:244:2157] sender: [1:245:2060] recipient: [1:226:2145] 2026-02-19T08:04:39.564872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:39.564968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:39.565057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:39.565109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:39.565144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:39.565179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:39.565205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:39.565230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:39.565293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:39.565355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:39.566130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:39.566404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:39.632688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:39.632732Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:39.639851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:39.639932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:39.640044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:39.646918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:39.647187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:39.647644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:39.648259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:39.651551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:39.651698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:39.653066Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:39.653118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:39.653165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:39.653196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:39.653222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:39.653305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:39.749265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.749946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:39.750645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... tModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-02-19T08:05:51.628142Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-02-19T08:05:51.628198Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-02-19T08:05:51.628624Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [7:1092:2911], Recipient [7:248:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:05:51.628701Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:05:51.628748Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046678944 2026-02-19T08:05:51.628917Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [7:989:2808], Recipient [7:248:2159]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2026-02-19T08:05:51.628952Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:05:51.629043Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:05:51.629167Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:05:51.629213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:1090:2909] 2026-02-19T08:05:51.629408Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [7:1092:2911], Recipient [7:248:2159]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:51.629446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:51.629524Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2026-02-19T08:05:51.630037Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [8:963:2104], Recipient [7:248:2159] 2026-02-19T08:05:51.630096Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:05:51.632224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 963 RawX2: 34359740472 } AllowCreateInTempDir: false } TxId: 107 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:05:51.632570Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:05:51.632748Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:05:51.633011Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:05:51.636174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:05:51.636574Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2026-02-19T08:05:51.636664Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2026-02-19T08:05:51.637137Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2026-02-19T08:05:51.637186Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2026-02-19T08:05:51.637579Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [7:1098:2917], Recipient [7:248:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:05:51.637639Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:05:51.637687Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046678944 2026-02-19T08:05:51.637868Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [7:989:2808], Recipient [7:248:2159]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2026-02-19T08:05:51.637910Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:05:51.637989Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-02-19T08:05:51.638122Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:05:51.638187Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- 
TTxNotificationSubscriber for txId 107: satisfy waiter [7:1096:2915] 2026-02-19T08:05:51.638439Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [7:1098:2917], Recipient [7:248:2159]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:51.638485Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:51.638528Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-02-19T08:05:51.638990Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [8:963:2104], Recipient [7:248:2159] 2026-02-19T08:05:51.639042Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:05:51.641178Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 963 RawX2: 34359740472 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:05:51.641517Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:05:51.641614Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2026-02-19T08:05:51.642036Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:05:51.647492Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:05:51.647867Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2026-02-19T08:05:51.647942Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-02-19T08:05:51.648416Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-02-19T08:05:51.648465Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-02-19T08:05:51.648864Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [7:1104:2923], Recipient [7:248:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:05:51.648929Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:05:51.648968Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046678944 2026-02-19T08:05:51.649117Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [7:989:2808], Recipient [7:248:2159]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2026-02-19T08:05:51.649155Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:05:51.649249Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-02-19T08:05:51.649377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:05:51.649427Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:1102:2921] 2026-02-19T08:05:51.649660Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [7:1104:2923], Recipient [7:248:2159]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:51.649910Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:05:51.649970Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> test.py::test[solomon-LabelColumnAliases-default.txt] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] >> listing_batching.py::TestListingBatching::test_listing_batching_solomon [GOOD] >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |88.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest |88.9%| [TM] {RESULT} ydb/library/query_actor/ut/unittest |88.9%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ynks/004542/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit_log.k95lhjoe.txt 2026-02-19T08:05:42.153351Z: {"tx_id":"01khtexw3z80rgcvnbndbgc1xk","database":"/Root/test_auditlog.py","end_time":"2026-02-19T08:05:42.153283Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2026-02-19T08:05:42.143270Z","grpc_method":"Ydb.Table.V1.TableService/BeginTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2026-02-19T08:05:42.348519Z: {"tx_id":"01khtexw3z80rgcvnbndbgc1xk","database":"/Root/test_auditlog.py","end_time":"2026-02-19T08:05:42.348464Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-02-19T08:05:42.181096Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-02-19T08:05:42.372712Z: {"tx_id":"01khtexw3z80rgcvnbndbgc1xk","database":"/Root/test_auditlog.py","end_time":"2026-02-19T08:05:42.372671Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2026-02-19T08:05:42.363084Z","grpc_method":"Ydb.Table.V1.TableService/CommitTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ynks/003fca/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit_log.t7jx84u5.txt 2026-02-19T08:05:37.189457Z: 
{"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-02-19T08:05:37.189416Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-02-19T08:05:36.891332Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] >> DescribeSchemaSecretsService::BatchRequest [GOOD] >> DescribeSchemaSecretsService::BigBatchRequest >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] >> test_canonical_records.py::test_create_drop_and_alter_database |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |88.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ynks/00452c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit_log.l0beadjb.txt 2026-02-19T08:05:49.294636Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> TBsLocalRecovery::StartStopNotEmptyDB [GOOD] >> TBsLocalRecovery::WriteRestartRead >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] >> 
test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] >> DescribeSchemaSecretsService::BigBatchRequest [GOOD] >> DescribeSchemaSecretsService::EmptyBatch >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] >> test.py::test[solomon-UnknownSetting-] [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, pic, release} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> DescribeSchemaSecretsService::EmptyBatch [GOOD] >> DescribeSchemaSecretsService::MixedGrantsInBatch >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_ttl.py::TestTTLDefaultEnv::test_case |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] |88.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] >> DescribeSchemaSecretsService::MixedGrantsInBatch [GOOD] >> DescribeSchemaSecretsService::SchemeCacheRetryErrors >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] Leader for TabletID 72057594046678944 is [1:244:2156] sender: [1:245:2060] recipient: [1:227:2145] 2026-02-19T08:04:39.804334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:04:39.804423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:04:39.804520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:04:39.804566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:04:39.804616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:04:39.804662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:04:39.804690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:04:39.804743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:04:39.804821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-02-19T08:04:39.804901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:04:39.805683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:04:39.806033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:04:39.879015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:04:39.879079Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:39.891185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:04:39.891824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:04:39.892023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:04:39.899569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:04:39.899804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:04:39.900512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:04:39.900831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:04:39.903163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:39.903332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:04:39.904458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:04:39.904535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:04:39.904748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:04:39.904816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:04:39.904867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:04:39.905016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:04:40.053625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.054533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.054643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.054715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.054798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.054881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.054969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.055029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.055106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.055185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.055265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.055344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.055446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:04:40.055517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:332:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
rd_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:34.148281Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:34.148365Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:34.148397Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:34.511751Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:34.511818Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:34.511902Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:34.511944Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:34.804508Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:34.804573Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:34.804623Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:34.804645Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:35.184487Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:35.184557Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:35.184629Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:35.184659Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:35.549578Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:35.549650Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:35.549741Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:35.549774Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:35.918291Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:35.918360Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:35.918426Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:35.918455Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:36.274513Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:36.274565Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:36.274611Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:36.274631Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:36.672580Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:36.672665Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:06:36.672753Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:36.672793Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:06:36.760820Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [7:1503:3252], Recipient [7:243:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:06:36.760913Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:06:36.761050Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:06:36.761276Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 216us result status StatusPathDoesNotExist 2026-02-19T08:06:36.761443Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:06:36.761966Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [7:1504:3253], Recipient [7:243:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:06:36.762025Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:06:36.762128Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:06:36.762294Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 168us result status StatusPathDoesNotExist 2026-02-19T08:06:36.762428Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:06:36.762862Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [7:1505:3254], Recipient [7:243:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2026-02-19T08:06:36.762912Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:06:36.763007Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:06:36.763192Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 172us result status StatusPathDoesNotExist 2026-02-19T08:06:36.763350Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |88.9%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest |88.9%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut >> TBsLocalRecovery::WriteRestartRead [GOOD] >> TBsLocalRecovery::WriteRestartReadHuge |89.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] |89.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] |89.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> DSProxyStrategyTest::Restore_block42 [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2026-02-19T08:05:10.606387Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608484359183684623:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:05:10.608114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:05:10.979226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:05:11.008307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:11.008430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:11.047386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:11.082286Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:11.172985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:05:11.185948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:05:11.185977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:05:11.185996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:05:11.186090Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:05:11.400063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:05:11.455607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:05:11.618677Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:16.200992Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608484385921459219:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:05:16.201043Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:05:16.262033Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:05:16.462733Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:16.482072Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:16.482147Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:16.506918Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:16.583142Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:05:16.812783Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:05:16.812804Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:05:16.812825Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:05:16.812906Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:05:17.091851Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:17.169988Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:05:17.199348Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:23.608870Z node 7 
:METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608484416715476045:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:05:23.608937Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:05:23.803659Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:05:24.078145Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:05:24.141690Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:24.206504Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:24.206582Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:24.303284Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:24.420523Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006122s 2026-02-19T08:05:24.525093Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:05:24.525134Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:05:24.525143Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:05:24.525222Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:05:24.674036Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:05:24.774670Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:25.387720Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:25.814515Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:05:33.625747Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-02-19T08:05:33.625953Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:05:33.843883Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:05:33.873908Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:05:33.879987Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:05:33.880072Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:05:33.896239Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:05:34.197866Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:05:34.197889Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:05:34.197900Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:05:34.197992Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:05:34.214549Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:05:34.591559Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:05:34.663617Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:05:34.804876Z node 10 :FLAT_TX_SC ... 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:06:18.404660Z node 28 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:06:18.436873Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:06:18.436985Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:06:18.510861Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:06:18.605780Z node 30 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009429s 2026-02-19T08:06:18.613921Z node 29 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.012878s 2026-02-19T08:06:18.722374Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:06:18.722397Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:06:18.722407Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:06:18.722497Z node 28 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:06:18.825812Z node 28 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:06:19.075669Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:06:19.196114Z node 28 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:06:19.199399Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:06:24.653448Z node 31 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7608484676941842872:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:06:24.653514Z node 31 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:06:24.689928Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:06:24.833760Z node 31 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:06:24.860187Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:06:24.860308Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:06:24.901464Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:06:24.918759Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:06:24.984025Z node 32 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005984s 2026-02-19T08:06:25.038640Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:06:25.038669Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:06:25.038680Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:06:25.038793Z node 31 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:06:25.395134Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:06:25.494180Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:06:25.665784Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:06:31.403791Z node 34 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7608484707619873026:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:06:31.403886Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:06:31.498868Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:06:31.652494Z node 34 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:06:31.684538Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:06:31.685606Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:06:31.729407Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-02-19T08:06:31.737375Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:06:31.847862Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:06:31.847889Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:06:31.847923Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:06:31.848060Z node 34 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:06:32.061020Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:06:32.165968Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:06:32.310666Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:06:32.422114Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:06:38.672071Z node 37 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7608484735981588074:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:06:38.672336Z node 37 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:06:38.711392Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:06:38.838384Z node 37 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:06:38.870418Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:06:38.870546Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:06:38.889231Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:06:39.014779Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:06:39.035058Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-02-19T08:06:39.035086Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:06:39.035097Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:06:39.035215Z node 37 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:06:39.209080Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:06:39.291288Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:06:39.696164Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> Backpressure::MonteCarlo [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_truncate/ydb-core-tx-datashard-ut_truncate |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_truncate/ydb-core-tx-datashard-ut_truncate |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_block42 [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |89.0%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_truncate/ydb-core-tx-datashard-ut_truncate >> DescribeSchemaSecretsService::SchemeCacheRetryErrors [GOOD] >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut_client/unittest >> Backpressure::MonteCarlo [GOOD] Test command err: Clock# 1970-01-01T00:00:00.000000Z elapsed# 0.000031s EventsProcessed# 0 clients.size# 0 Clock# 1970-01-01T00:00:10.358289Z elapsed# 0.000141s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:21.564929Z elapsed# 0.000167s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:36.077748Z elapsed# 0.000186s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:50.992063Z elapsed# 0.000205s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:03.810209Z elapsed# 0.000228s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:21.938232Z elapsed# 0.000252s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:32.567374Z elapsed# 0.000284s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:52.476302Z elapsed# 0.000303s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:10.303693Z elapsed# 0.000323s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:25.625711Z elapsed# 0.000343s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:44.233928Z elapsed# 0.000367s 
EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:00.989316Z elapsed# 0.000385s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:12.198128Z elapsed# 0.000408s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:26.269434Z elapsed# 0.013304s EventsProcessed# 1707 clients.size# 1 Clock# 1970-01-01T00:03:42.439589Z elapsed# 0.026196s EventsProcessed# 3656 clients.size# 1 Clock# 1970-01-01T00:04:01.079345Z elapsed# 0.040997s EventsProcessed# 5879 clients.size# 1 Clock# 1970-01-01T00:04:17.952505Z elapsed# 0.054099s EventsProcessed# 7864 clients.size# 1 Clock# 1970-01-01T00:04:31.057168Z elapsed# 0.065584s EventsProcessed# 9410 clients.size# 1 Clock# 1970-01-01T00:04:42.141487Z elapsed# 0.074249s EventsProcessed# 10751 clients.size# 1 Clock# 1970-01-01T00:04:53.637480Z elapsed# 0.083754s EventsProcessed# 12142 clients.size# 1 Clock# 1970-01-01T00:05:07.769394Z elapsed# 0.094221s EventsProcessed# 13780 clients.size# 1 Clock# 1970-01-01T00:05:21.377386Z elapsed# 0.105078s EventsProcessed# 15461 clients.size# 1 Clock# 1970-01-01T00:05:38.071729Z elapsed# 0.118858s EventsProcessed# 17465 clients.size# 1 Clock# 1970-01-01T00:05:51.741388Z elapsed# 0.129037s EventsProcessed# 19040 clients.size# 1 Clock# 1970-01-01T00:06:05.963833Z elapsed# 0.140084s EventsProcessed# 20671 clients.size# 1 Clock# 1970-01-01T00:06:25.344818Z elapsed# 0.156760s EventsProcessed# 23037 clients.size# 1 Clock# 1970-01-01T00:06:35.741545Z elapsed# 0.165776s EventsProcessed# 24384 clients.size# 1 Clock# 1970-01-01T00:06:52.612706Z elapsed# 0.178352s EventsProcessed# 26382 clients.size# 1 Clock# 1970-01-01T00:07:09.071799Z elapsed# 0.193878s EventsProcessed# 28323 clients.size# 1 Clock# 1970-01-01T00:07:21.583313Z elapsed# 0.208996s EventsProcessed# 29863 clients.size# 1 Clock# 1970-01-01T00:07:33.626190Z elapsed# 0.222967s EventsProcessed# 31303 clients.size# 1 Clock# 1970-01-01T00:07:44.962851Z elapsed# 0.247057s EventsProcessed# 33976 clients.size# 2 Clock# 1970-01-01T00:07:59.744126Z elapsed# 0.270331s EventsProcessed# 37443 clients.size# 2 Clock# 1970-01-01T00:08:12.060997Z elapsed# 0.290826s EventsProcessed# 40428 clients.size# 2 Clock# 1970-01-01T00:08:22.157505Z elapsed# 0.306879s EventsProcessed# 42745 clients.size# 2 Clock# 1970-01-01T00:08:37.373241Z elapsed# 0.329574s EventsProcessed# 46354 clients.size# 2 Clock# 1970-01-01T00:08:53.315080Z elapsed# 0.354347s EventsProcessed# 50033 clients.size# 2 Clock# 1970-01-01T00:09:09.574822Z elapsed# 0.380734s EventsProcessed# 53803 clients.size# 2 Clock# 1970-01-01T00:09:20.169102Z elapsed# 0.396439s EventsProcessed# 56359 clients.size# 2 Clock# 1970-01-01T00:09:36.246353Z elapsed# 0.423840s EventsProcessed# 60291 clients.size# 2 Clock# 1970-01-01T00:09:48.303436Z elapsed# 0.448270s EventsProcessed# 63137 clients.size# 2 Clock# 1970-01-01T00:10:00.868609Z elapsed# 0.470045s EventsProcessed# 66179 clients.size# 2 Clock# 1970-01-01T00:10:13.146845Z elapsed# 0.500096s EventsProcessed# 70477 clients.size# 3 Clock# 1970-01-01T00:10:32.126168Z elapsed# 0.542465s EventsProcessed# 77268 clients.size# 3 Clock# 1970-01-01T00:10:48.954686Z elapsed# 0.580865s EventsProcessed# 83396 clients.size# 3 Clock# 1970-01-01T00:11:06.404102Z elapsed# 0.635833s EventsProcessed# 91598 clients.size# 4 Clock# 1970-01-01T00:11:16.757244Z elapsed# 0.666815s EventsProcessed# 96459 clients.size# 4 Clock# 1970-01-01T00:11:29.707728Z elapsed# 0.715951s EventsProcessed# 104132 clients.size# 5 Clock# 1970-01-01T00:11:47.383257Z elapsed# 0.787263s EventsProcessed# 114619 
clients.size# 5 Clock# 1970-01-01T00:12:02.277606Z elapsed# 0.835098s EventsProcessed# 121703 clients.size# 4 Clock# 1970-01-01T00:12:16.724455Z elapsed# 0.880513s EventsProcessed# 128628 clients.size# 4 Clock# 1970-01-01T00:12:27.450848Z elapsed# 0.913524s EventsProcessed# 133727 clients.size# 4 Clock# 1970-01-01T00:12:43.022441Z elapsed# 0.958843s EventsProcessed# 140950 clients.size# 4 Clock# 1970-01-01T00:12:56.889392Z elapsed# 1.016089s EventsProcessed# 149176 clients.size# 5 Clock# 1970-01-01T00:13:11.764018Z elapsed# 1.069749s EventsProcessed# 157864 clients.size# 5 Clock# 1970-01-01T00:13:30.678966Z elapsed# 1.127714s EventsProcessed# 166844 clients.size# 4 Clock# 1970-01-01T00:13:48.067778Z elapsed# 1.182569s EventsProcessed# 175077 clients.size# 4 Clock# 1970-01-01T00:13:59.966847Z elapsed# 1.224813s EventsProcessed# 180823 clients.size# 4 Clock# 1970-01-01T00:14:16.947021Z elapsed# 1.279977s EventsProcessed# 189044 clients.size# 4 Clock# 1970-01-01T00:14:33.106934Z elapsed# 1.330316s EventsProcessed# 196688 clients.size# 4 Clock# 1970-01-01T00:14:43.146234Z elapsed# 1.360311s EventsProcessed# 201363 clients.size# 4 Clock# 1970-01-01T00:14:55.571113Z elapsed# 1.397164s EventsProcessed# 207384 clients.size# 4 Clock# 1970-01-01T00:15:06.732603Z elapsed# 1.430550s EventsProcessed# 212574 clients.size# 4 Clock# 1970-01-01T00:15:18.214162Z elapsed# 1.465682s EventsProcessed# 218012 clients.size# 4 Clock# 1970-01-01T00:15:28.765796Z elapsed# 1.498761s EventsProcessed# 222830 clients.size# 4 Clock# 1970-01-01T00:15:44.730726Z elapsed# 1.554520s EventsProcessed# 230368 clients.size# 4 Clock# 1970-01-01T00:15:58.440945Z elapsed# 1.596512s EventsProcessed# 236879 clients.size# 4 Clock# 1970-01-01T00:16:13.305263Z elapsed# 1.653424s EventsProcessed# 245745 clients.size# 5 Clock# 1970-01-01T00:16:24.722408Z elapsed# 1.696163s EventsProcessed# 252392 clients.size# 5 Clock# 1970-01-01T00:16:37.505242Z elapsed# 1.747879s EventsProcessed# 259905 clients.size# 5 Clock# 1970-01-01T00:16:48.154961Z elapsed# 1.787381s EventsProcessed# 266119 clients.size# 5 Clock# 1970-01-01T00:17:06.505966Z elapsed# 1.854908s EventsProcessed# 276955 clients.size# 5 Clock# 1970-01-01T00:17:16.689058Z elapsed# 1.892916s EventsProcessed# 283051 clients.size# 5 Clock# 1970-01-01T00:17:32.490509Z elapsed# 1.950886s EventsProcessed# 292497 clients.size# 5 Clock# 1970-01-01T00:17:51.796328Z elapsed# 2.021520s EventsProcessed# 303927 clients.size# 5 Clock# 1970-01-01T00:18:03.420051Z elapsed# 2.082406s EventsProcessed# 312115 clients.size# 6 Clock# 1970-01-01T00:18:16.526571Z elapsed# 2.142649s EventsProcessed# 321301 clients.size# 6 Clock# 1970-01-01T00:18:30.046406Z elapsed# 2.201209s EventsProcessed# 330860 clients.size# 6 Clock# 1970-01-01T00:18:45.166813Z elapsed# 2.266247s EventsProcessed# 341376 clients.size# 6 Clock# 1970-01-01T00:19:00.180925Z elapsed# 2.340372s EventsProcessed# 352277 clients.size# 6 Clock# 1970-01-01T00:19:15.782767Z elapsed# 2.384356s EventsProcessed# 361445 clients.size# 5 Clock# 1970-01-01T00:19:35.590076Z elapsed# 2.444908s EventsProcessed# 373218 clients.size# 5 Clock# 1970-01-01T00:19:54.020533Z elapsed# 2.506109s EventsProcessed# 382039 clients.size# 4 Clock# 1970-01-01T00:20:13.967467Z elapsed# 2.540882s EventsProcessed# 389285 clients.size# 3 Clock# 1970-01-01T00:20:30.011416Z elapsed# 2.566645s EventsProcessed# 394919 clients.size# 3 Clock# 1970-01-01T00:20:43.295852Z elapsed# 2.588684s EventsProcessed# 399621 clients.size# 3 Clock# 1970-01-01T00:21:00.249112Z elapsed# 2.649993s 
EventsProcessed# 405582 clients.size# 3 Clock# 1970-01-01T00:21:14.684462Z elapsed# 2.721606s EventsProcessed# 412522 clients.size# 4 Clock# 1970-01-01T00:21:26.676721Z elapsed# 2.776306s EventsProcessed# 418415 clients.size# 4 Clock# 1970-01-01T00:21:45.948838Z elapsed# 2.834572s EventsProcessed# 427550 clients.size# 4 Clock# 1970-01-01T00:22:01.536570Z elapsed# 2.877973s EventsProcessed# 434813 clients.size# 4 Clock# 1970-01-01T00:22:17.511479Z elapsed# 2.924968s EventsProcessed# 442479 clients.size# 4 Clock# 1970-01-01T00:22:30.521956Z elapsed# 2.962719s EventsProcessed# 448736 clients.size# 4 Clock# 1970-01-01T00:22:44.678709Z elapsed# 3.025367s EventsProcessed# 455551 clients.size# 4 Clock# 1970-01-01T00:22:58.827327Z elapsed# 3.067475s EventsProcessed# 462350 clients.size# 4 Clock# 1970-01-01T00:23:15.950456Z elapsed# 3.118973s EventsProcessed# 470338 clients.size# 4 Clock# 1970-01-01T00:23:29.613736Z elapsed# 3.149854s EventsProcessed# 475088 clients.size# 3 Clock# 1970-01-01T00:23:40.245894Z elapsed# 3.166342s EventsProcessed# 477594 clients.size# 2 Clock# 1970-01-01T00:24:00.112768Z elapsed# 3.196088s EventsProcessed# 482220 clients.size# 2 Clock# 1970-01-01T00:24:19.678380Z elapsed# 3.226469s EventsProcessed# 486851 clients.size# 2 Clock# 1970-01-01T00:24:37.164611Z elapsed# 3.274021s EventsProcessed# 491080 clients.size# 2 Clock# 1970-01-01T00:24:47.861213Z elapsed# 3.290558s EventsProcessed# 493557 clients.size# 2 Clock# 1970-01-01T00:25:07.800861Z elapsed# 3.321560s EventsProcessed# 498303 clients.size# 2 Clock# 1970-01-01T00:25:27.274703Z elapsed# 3.350941s EventsProcessed# 502683 clients.size# 2 Clock# 1970-01-01T00:25:46.508720Z elapsed# 3.381586s EventsProcessed# 507215 clients.size# 2 Clock# 1970-01-01T00:25:59.592122Z elapsed# 3.402023s EventsProcessed# 510350 clients.size# 2 Clock# 1970-01-01T00:26:12.126545Z elapsed# 3.421392s EventsProcessed# 513331 clients.size# 2 Clock# 1970-01-01T00:26:30.578120Z elapsed# 3.448741s EventsProcessed# 517672 clients.size# 2 Clock# 1970-01-01T00:26:41.225847Z elapsed# 3.465803s EventsProcessed# 520249 clients.size# 2 Clock# 1970-01-01T00:26:51.461374Z elapsed# 3.489037s EventsProcessed# 523780 clients.size# 3 Clock# 1970-01-01T00:27:09.060333Z elapsed# 3.547811s EventsProcessed# 529861 clients.size# 3 Clock# 1970-01-01T00:27:27.669506Z elapsed# 3.593589s EventsProcessed# 536531 clients.size# 3 Clock# 1970-01-01T00:27:40.158033Z elapsed# 3.622027s EventsProcessed# 540986 clients.size# 3 Clock# 1970-01-01T00:27:56.581669Z elapsed# 3.667438s EventsProcessed# 546938 clients.size# 3 Clock# 1970-01-01T00:28:13.508389Z elapsed# 3.709525s EventsProcessed# 552940 clients.size# 3 Clock# 1970-01-01T00:28:26.962010Z elapsed# 3.740961s EventsProcessed# 557887 clients.size# 3 Clock# 1970-01-01T00:28:38.961672Z elapsed# 3.785891s EventsProcessed# 562158 clients.size# 3 Clock# 1970-01-01T00:28:56.296482Z elapsed# 3.826058s EventsProcessed# 568470 clients.size# 3 Clock# 1970-01-01T00:29:08.573472Z elapsed# 3.852079s EventsProcessed# 572897 clients.size# 3 Clock# 1970-01-01T00:29:27.724292Z elapsed# 3.892602s EventsProcessed# 579632 clients.size# 3 Clock# 1970-01-01T00:29:42.071035Z elapsed# 3.923987s EventsProcessed# 584717 clients.size# 3 Clock# 1970-01-01T00:29:53.374759Z elapsed# 3.948629s EventsProcessed# 588654 clients.size# 3 Clock# 1970-01-01T00:30:09.987362Z elapsed# 3.988537s EventsProcessed# 594777 clients.size# 3 Clock# 1970-01-01T00:30:29.499672Z elapsed# 4.052120s EventsProcessed# 601668 clients.size# 3 Clock# 1970-01-01T00:30:48.175180Z 
elapsed# 4.082969s EventsProcessed# 606139 clients.size# 2 Clock# 1970-01-01T00:31:00.904994Z elapsed# 4.112860s EventsProcessed# 610650 clients.size# 3 Clock# 1970-01-01T00:31:19.272546Z elapsed# 4.155831s EventsProcessed# 617265 clients.size# 3 Clock# 1970-01-01T00:31:36.028959Z elapsed# 4.197448s EventsProcessed# 623348 ... 70-01-01T05:29:37.564679Z elapsed# 138.739421s EventsProcessed# 12614808 clients.size# 6 Clock# 1970-01-01T05:29:55.779193Z elapsed# 138.883671s EventsProcessed# 12627863 clients.size# 6 Clock# 1970-01-01T05:30:10.517001Z elapsed# 139.029286s EventsProcessed# 12638301 clients.size# 6 Clock# 1970-01-01T05:30:23.310138Z elapsed# 139.126500s EventsProcessed# 12647552 clients.size# 6 Clock# 1970-01-01T05:30:35.602227Z elapsed# 139.229460s EventsProcessed# 12656283 clients.size# 6 Clock# 1970-01-01T05:30:49.237719Z elapsed# 139.344710s EventsProcessed# 12666038 clients.size# 6 Clock# 1970-01-01T05:31:02.561512Z elapsed# 139.494208s EventsProcessed# 12675330 clients.size# 6 Clock# 1970-01-01T05:31:18.607582Z elapsed# 139.638191s EventsProcessed# 12686858 clients.size# 6 Clock# 1970-01-01T05:31:31.749443Z elapsed# 139.752748s EventsProcessed# 12696074 clients.size# 6 Clock# 1970-01-01T05:31:50.797788Z elapsed# 139.974474s EventsProcessed# 12711908 clients.size# 7 Clock# 1970-01-01T05:32:08.817627Z elapsed# 140.157146s EventsProcessed# 12726889 clients.size# 7 Clock# 1970-01-01T05:32:24.218454Z elapsed# 140.315968s EventsProcessed# 12739765 clients.size# 7 Clock# 1970-01-01T05:32:34.304262Z elapsed# 140.456876s EventsProcessed# 12748413 clients.size# 7 Clock# 1970-01-01T05:32:50.360503Z elapsed# 140.610926s EventsProcessed# 12761607 clients.size# 7 Clock# 1970-01-01T05:33:04.246215Z elapsed# 140.739731s EventsProcessed# 12773034 clients.size# 7 Clock# 1970-01-01T05:33:17.620712Z elapsed# 140.911487s EventsProcessed# 12784181 clients.size# 7 Clock# 1970-01-01T05:33:33.573908Z elapsed# 141.047578s EventsProcessed# 12795425 clients.size# 6 Clock# 1970-01-01T05:33:50.336090Z elapsed# 141.188045s EventsProcessed# 12807415 clients.size# 6 Clock# 1970-01-01T05:34:08.908956Z elapsed# 141.385294s EventsProcessed# 12820576 clients.size# 6 Clock# 1970-01-01T05:34:20.229447Z elapsed# 141.480175s EventsProcessed# 12828428 clients.size# 6 Clock# 1970-01-01T05:34:32.484865Z elapsed# 141.582462s EventsProcessed# 12837078 clients.size# 6 Clock# 1970-01-01T05:34:46.603078Z elapsed# 141.718626s EventsProcessed# 12848743 clients.size# 7 Clock# 1970-01-01T05:35:02.241966Z elapsed# 141.912685s EventsProcessed# 12861760 clients.size# 7 Clock# 1970-01-01T05:35:22.054545Z elapsed# 142.089531s EventsProcessed# 12878454 clients.size# 7 Clock# 1970-01-01T05:35:33.671389Z elapsed# 142.181797s EventsProcessed# 12888028 clients.size# 7 Clock# 1970-01-01T05:35:47.611960Z elapsed# 142.325239s EventsProcessed# 12899476 clients.size# 7 Clock# 1970-01-01T05:36:02.444322Z elapsed# 142.434808s EventsProcessed# 12911616 clients.size# 7 Clock# 1970-01-01T05:36:15.312891Z elapsed# 142.529409s EventsProcessed# 12922329 clients.size# 7 Clock# 1970-01-01T05:36:30.700865Z elapsed# 142.674718s EventsProcessed# 12934880 clients.size# 7 Clock# 1970-01-01T05:36:48.798736Z elapsed# 142.817988s EventsProcessed# 12950183 clients.size# 7 Clock# 1970-01-01T05:37:01.251983Z elapsed# 142.915318s EventsProcessed# 12960432 clients.size# 7 Clock# 1970-01-01T05:37:20.926588Z elapsed# 143.119380s EventsProcessed# 12979242 clients.size# 8 Clock# 1970-01-01T05:37:33.153959Z elapsed# 143.234116s EventsProcessed# 12990929 clients.size# 8 
Clock# 1970-01-01T05:37:46.238969Z elapsed# 143.387686s EventsProcessed# 13003416 clients.size# 8 Clock# 1970-01-01T05:37:59.771094Z elapsed# 143.510131s EventsProcessed# 13016428 clients.size# 8 Clock# 1970-01-01T05:38:14.048392Z elapsed# 143.639469s EventsProcessed# 13030046 clients.size# 8 Clock# 1970-01-01T05:38:28.032489Z elapsed# 143.829076s EventsProcessed# 13044955 clients.size# 9 Clock# 1970-01-01T05:38:40.686566Z elapsed# 143.998030s EventsProcessed# 13060006 clients.size# 10 Clock# 1970-01-01T05:38:51.934691Z elapsed# 144.168209s EventsProcessed# 13073519 clients.size# 10 Clock# 1970-01-01T05:39:04.269353Z elapsed# 144.333265s EventsProcessed# 13088322 clients.size# 10 Clock# 1970-01-01T05:39:21.347723Z elapsed# 144.634656s EventsProcessed# 13108995 clients.size# 10 Clock# 1970-01-01T05:39:37.900433Z elapsed# 144.854271s EventsProcessed# 13128751 clients.size# 10 Clock# 1970-01-01T05:39:48.616494Z elapsed# 144.976882s EventsProcessed# 13141270 clients.size# 10 Clock# 1970-01-01T05:39:58.635543Z elapsed# 145.139811s EventsProcessed# 13153317 clients.size# 10 Clock# 1970-01-01T05:40:09.876773Z elapsed# 145.254344s EventsProcessed# 13165512 clients.size# 9 Clock# 1970-01-01T05:40:26.535421Z elapsed# 145.504040s EventsProcessed# 13185482 clients.size# 10 Clock# 1970-01-01T05:40:37.542515Z elapsed# 145.640462s EventsProcessed# 13198574 clients.size# 10 Clock# 1970-01-01T05:40:47.974290Z elapsed# 145.792428s EventsProcessed# 13211069 clients.size# 10 Clock# 1970-01-01T05:41:00.718639Z elapsed# 146.037047s EventsProcessed# 13226219 clients.size# 10 Clock# 1970-01-01T05:41:11.812799Z elapsed# 146.198922s EventsProcessed# 13239298 clients.size# 10 Clock# 1970-01-01T05:41:28.510076Z elapsed# 146.486086s EventsProcessed# 13259031 clients.size# 10 Clock# 1970-01-01T05:41:45.286003Z elapsed# 146.726521s EventsProcessed# 13278929 clients.size# 10 Clock# 1970-01-01T05:41:58.147614Z elapsed# 146.946276s EventsProcessed# 13293850 clients.size# 10 Clock# 1970-01-01T05:42:14.698700Z elapsed# 147.184230s EventsProcessed# 13313365 clients.size# 10 Clock# 1970-01-01T05:42:29.488752Z elapsed# 147.437077s EventsProcessed# 13331030 clients.size# 10 Clock# 1970-01-01T05:42:49.170196Z elapsed# 147.720353s EventsProcessed# 13354227 clients.size# 10 Clock# 1970-01-01T05:43:04.905456Z elapsed# 147.999425s EventsProcessed# 13372809 clients.size# 10 Clock# 1970-01-01T05:43:17.365207Z elapsed# 148.223271s EventsProcessed# 13387408 clients.size# 10 Clock# 1970-01-01T05:43:34.796290Z elapsed# 148.545337s EventsProcessed# 13408039 clients.size# 10 Clock# 1970-01-01T05:43:45.928763Z elapsed# 148.689170s EventsProcessed# 13421199 clients.size# 10 Clock# 1970-01-01T05:43:57.599659Z elapsed# 148.905036s EventsProcessed# 13435137 clients.size# 10 Clock# 1970-01-01T05:44:13.471792Z elapsed# 149.139459s EventsProcessed# 13453825 clients.size# 10 Clock# 1970-01-01T05:44:32.599529Z elapsed# 149.447764s EventsProcessed# 13474350 clients.size# 9 Clock# 1970-01-01T05:44:51.853799Z elapsed# 149.692301s EventsProcessed# 13494869 clients.size# 9 Clock# 1970-01-01T05:45:11.152519Z elapsed# 149.988338s EventsProcessed# 13515633 clients.size# 9 Clock# 1970-01-01T05:45:27.304888Z elapsed# 150.203821s EventsProcessed# 13532794 clients.size# 9 Clock# 1970-01-01T05:45:43.893069Z elapsed# 150.464155s EventsProcessed# 13550442 clients.size# 9 Clock# 1970-01-01T05:45:59.096823Z elapsed# 150.688280s EventsProcessed# 13568441 clients.size# 10 Clock# 1970-01-01T05:46:12.780268Z elapsed# 150.936918s EventsProcessed# 13584881 clients.size# 10 
Clock# 1970-01-01T05:46:27.386372Z elapsed# 151.151788s EventsProcessed# 13602142 clients.size# 10 Clock# 1970-01-01T05:46:43.777694Z elapsed# 151.441009s EventsProcessed# 13621697 clients.size# 10 Clock# 1970-01-01T05:46:55.452086Z elapsed# 151.614514s EventsProcessed# 13635592 clients.size# 10 Clock# 1970-01-01T05:47:11.550363Z elapsed# 151.902411s EventsProcessed# 13654660 clients.size# 10 Clock# 1970-01-01T05:47:24.974376Z elapsed# 152.102170s EventsProcessed# 13670529 clients.size# 10 Clock# 1970-01-01T05:47:36.581048Z elapsed# 152.273194s EventsProcessed# 13684365 clients.size# 10 Clock# 1970-01-01T05:47:47.323848Z elapsed# 152.482665s EventsProcessed# 13697373 clients.size# 10 Clock# 1970-01-01T05:48:04.600053Z elapsed# 152.742740s EventsProcessed# 13718250 clients.size# 10 Clock# 1970-01-01T05:48:23.394971Z elapsed# 153.061941s EventsProcessed# 13740634 clients.size# 10 Clock# 1970-01-01T05:48:34.012909Z elapsed# 153.214951s EventsProcessed# 13753274 clients.size# 10 Clock# 1970-01-01T05:48:50.342586Z elapsed# 153.474412s EventsProcessed# 13770540 clients.size# 9 Clock# 1970-01-01T05:49:01.951992Z elapsed# 153.649795s EventsProcessed# 13784556 clients.size# 10 Clock# 1970-01-01T05:49:20.481634Z elapsed# 153.967753s EventsProcessed# 13806410 clients.size# 10 Clock# 1970-01-01T05:49:32.547591Z elapsed# 154.141301s EventsProcessed# 13820491 clients.size# 10 Clock# 1970-01-01T05:49:50.800614Z elapsed# 154.427625s EventsProcessed# 13842115 clients.size# 10 Clock# 1970-01-01T05:50:04.094937Z elapsed# 154.602565s EventsProcessed# 13857872 clients.size# 10 Clock# 1970-01-01T05:50:19.798270Z elapsed# 154.876409s EventsProcessed# 13876537 clients.size# 10 Clock# 1970-01-01T05:50:39.702392Z elapsed# 155.165082s EventsProcessed# 13899986 clients.size# 10 Clock# 1970-01-01T05:50:53.722554Z elapsed# 155.385167s EventsProcessed# 13916516 clients.size# 10 Clock# 1970-01-01T05:51:12.552789Z elapsed# 155.644959s EventsProcessed# 13938839 clients.size# 10 Clock# 1970-01-01T05:51:25.597433Z elapsed# 155.866601s EventsProcessed# 13954485 clients.size# 10 Clock# 1970-01-01T05:51:38.424807Z elapsed# 156.045819s EventsProcessed# 13970153 clients.size# 10 Clock# 1970-01-01T05:51:50.716543Z elapsed# 156.267991s EventsProcessed# 13984667 clients.size# 10 Clock# 1970-01-01T05:52:02.566499Z elapsed# 156.405396s EventsProcessed# 13997538 clients.size# 9 Clock# 1970-01-01T05:52:22.554589Z elapsed# 156.690475s EventsProcessed# 14018867 clients.size# 9 Clock# 1970-01-01T05:52:41.473260Z elapsed# 156.919771s EventsProcessed# 14038816 clients.size# 9 Clock# 1970-01-01T05:52:57.671080Z elapsed# 157.137529s EventsProcessed# 14056201 clients.size# 9 Clock# 1970-01-01T05:53:16.188396Z elapsed# 157.344120s EventsProcessed# 14075925 clients.size# 9 Clock# 1970-01-01T05:53:30.981411Z elapsed# 157.547237s EventsProcessed# 14091770 clients.size# 9 Clock# 1970-01-01T05:53:46.269946Z elapsed# 157.746279s EventsProcessed# 14107855 clients.size# 9 Clock# 1970-01-01T05:53:58.452894Z elapsed# 157.909822s EventsProcessed# 14119468 clients.size# 8 Clock# 1970-01-01T05:54:13.861050Z elapsed# 158.100801s EventsProcessed# 14133933 clients.size# 8 Clock# 1970-01-01T05:54:33.158155Z elapsed# 158.364678s EventsProcessed# 14152310 clients.size# 8 Clock# 1970-01-01T05:54:49.074812Z elapsed# 158.675548s EventsProcessed# 14169240 clients.size# 9 Clock# 1970-01-01T05:55:05.969488Z elapsed# 158.885170s EventsProcessed# 14187698 clients.size# 9 Clock# 1970-01-01T05:55:17.153630Z elapsed# 159.115148s EventsProcessed# 14199784 clients.size# 9 
Clock# 1970-01-01T05:55:36.096495Z elapsed# 159.348671s EventsProcessed# 14217799 clients.size# 8 Clock# 1970-01-01T05:55:46.971099Z elapsed# 159.491019s EventsProcessed# 14227876 clients.size# 8 Clock# 1970-01-01T05:56:04.175346Z elapsed# 159.733783s EventsProcessed# 14244037 clients.size# 8 Clock# 1970-01-01T05:56:21.394859Z elapsed# 159.897050s EventsProcessed# 14260292 clients.size# 8 Clock# 1970-01-01T05:56:38.065295Z elapsed# 160.106204s EventsProcessed# 14276147 clients.size# 8 Clock# 1970-01-01T05:56:57.242774Z elapsed# 160.322958s EventsProcessed# 14296833 clients.size# 9 Clock# 1970-01-01T05:57:09.304307Z elapsed# 160.489874s EventsProcessed# 14309697 clients.size# 9 Clock# 1970-01-01T05:57:27.954795Z elapsed# 160.724183s EventsProcessed# 14329483 clients.size# 9 Clock# 1970-01-01T05:57:39.002629Z elapsed# 160.910855s EventsProcessed# 14342504 clients.size# 10 Clock# 1970-01-01T05:57:57.516160Z elapsed# 161.119132s EventsProcessed# 14364447 clients.size# 10 Clock# 1970-01-01T05:58:16.917652Z elapsed# 161.404922s EventsProcessed# 14387329 clients.size# 10 Clock# 1970-01-01T05:58:34.692419Z elapsed# 161.633637s EventsProcessed# 14408486 clients.size# 10 Clock# 1970-01-01T05:58:49.256119Z elapsed# 161.859209s EventsProcessed# 14425582 clients.size# 10 Clock# 1970-01-01T05:58:59.955040Z elapsed# 161.984598s EventsProcessed# 14437240 clients.size# 9 Clock# 1970-01-01T05:59:14.000430Z elapsed# 162.228550s EventsProcessed# 14452136 clients.size# 9 Clock# 1970-01-01T05:59:30.817269Z elapsed# 162.443665s EventsProcessed# 14470026 clients.size# 9 Clock# 1970-01-01T05:59:49.038908Z elapsed# 162.706008s EventsProcessed# 14489513 clients.size# 9 >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |89.0%| [TM] {RESULT} ydb/core/blobstorage/backpressure/ut_client/unittest |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |89.0%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors >> 
test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] Test command err: 2026-02-19T08:04:47.827835Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-02-19T08:04:47.827931Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-02-19T08:04:47.827954Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-02-19T08:04:47.828175Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-02-19T08:04:47.828196Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-02-19T08:04:47.828227Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-02-19T08:04:47.828286Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-02-19T08:04:47.830989Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] 
Got TEvGetCheckpointsMetadataResponse 2026-02-19T08:04:47.831021Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-02-19T08:04:47.831055Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-02-19T08:04:47.836713Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:391: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-02-19T08:04:47.836763Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:437: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-02-19T08:04:47.836811Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:447: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-02-19T08:04:47.836961Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:47.836987Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-02-19T08:04:47.837015Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:47.837037Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2026-02-19T08:04:47.837075Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:47.837106Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2026-02-19T08:04:47.837128Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:491: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-02-19T08:04:47.837195Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:503: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2026-02-19T08:04:47.837216Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:518: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-02-19T08:04:47.837311Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:534: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2026-02-19T08:04:47.837336Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2026-02-19T08:04:47.837361Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:534: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2026-02-19T08:04:47.837391Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2026-02-19T08:04:47.837418Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:545: 
[my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-02-19T08:04:47.837465Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:566: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2026-02-19T08:04:47.837500Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:581: [my-graph-id.42] [42:1] Checkpoint completed 2026-02-19T08:04:47.837528Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:651: [my-graph-id.42] Got TEvRunGraph 2026-02-19T08:04:47.904260Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-02-19T08:04:47.904350Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-02-19T08:04:47.904384Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-02-19T08:04:47.904569Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-02-19T08:04:47.904587Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-02-19T08:04:47.904621Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-02-19T08:04:47.904658Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-02-19T08:04:47.904758Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-02-19T08:04:47.904778Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-02-19T08:04:47.904796Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-02-19T08:04:47.904891Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:391: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-02-19T08:04:47.904930Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:437: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-02-19T08:04:47.904957Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:447: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-02-19T08:04:47.905063Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:47.905091Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-02-19T08:04:47.905129Z node 2 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:47.905150Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2026-02-19T08:04:47.905174Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:47.905195Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2026-02-19T08:04:47.905219Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:491: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-02-19T08:04:47.905283Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:503: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2026-02-19T08:04:47.905307Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:518: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-02-19T08:04:47.905380Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:534: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2026-02-19T08:04:47.905404Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2026-02-19T08:04:47.905444Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:534: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2026-02-19T08:04:47.905469Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2026-02-19T08:04:47.905495Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:545: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-02-19T08:04:47.905536Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:566: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2026-02-19T08:04:47.905560Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:581: [my-graph-id.42] [42:1] Checkpoint completed 2026-02-19T08:04:47.905584Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:651: [my-graph-id.42] Got TEvRunGraph 2026-02-19T08:04:47.975662Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-02-19T08:04:47.975753Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-02-19T08:04:47.975777Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Wa ... 
-02-19T08:04:48.034523Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2026-02-19T08:04:48.034556Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:48.034583Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2026-02-19T08:04:48.034602Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:491: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-02-19T08:04:48.034632Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:503: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2026-02-19T08:04:48.034658Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:518: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-02-19T08:04:48.034730Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:534: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2026-02-19T08:04:48.034755Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2026-02-19T08:04:48.034780Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:534: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2026-02-19T08:04:48.034811Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2026-02-19T08:04:48.034831Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:545: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-02-19T08:04:48.034865Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:566: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2026-02-19T08:04:48.034883Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:581: [my-graph-id.42] [42:3] Checkpoint completed 2026-02-19T08:04:48.034905Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2026-02-19T08:04:48.034926Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-02-19T08:04:48.034965Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:391: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2026-02-19T08:04:48.034989Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:437: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2026-02-19T08:04:48.035035Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:48.035056Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:4] Task 
state saved, need 2 more acks 2026-02-19T08:04:48.035079Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:48.035097Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2026-02-19T08:04:48.035144Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:48.035167Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2026-02-19T08:04:48.035185Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:491: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-02-19T08:04:48.035216Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:503: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2026-02-19T08:04:48.035245Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:518: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-02-19T08:04:48.035321Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:534: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2026-02-19T08:04:48.035352Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2026-02-19T08:04:48.035384Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:534: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2026-02-19T08:04:48.035413Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2026-02-19T08:04:48.035435Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:545: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-02-19T08:04:48.035469Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:566: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2026-02-19T08:04:48.035488Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:581: [my-graph-id.42] [42:4] Checkpoint completed 2026-02-19T08:04:48.095038Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-02-19T08:04:48.095114Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-02-19T08:04:48.095136Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-02-19T08:04:48.095305Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-02-19T08:04:48.095325Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-02-19T08:04:48.095344Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-02-19T08:04:48.095379Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-02-19T08:04:48.095498Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-02-19T08:04:48.095527Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-02-19T08:04:48.095552Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-02-19T08:04:48.095647Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:391: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-02-19T08:04:48.095669Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:437: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-02-19T08:04:48.095695Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:447: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-02-19T08:04:48.095789Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-02-19T08:04:48.095810Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:478: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-02-19T08:04:48.095852Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2026-02-19T08:04:48.095874Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:481: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2026-02-19T08:04:48.095901Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:468: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2026-02-19T08:04:48.095935Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:481: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2026-02-19T08:04:48.095965Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:486: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2026-02-19T08:04:48.096003Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:592: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2026-02-19T08:04:48.096022Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: checkpoint_coordinator.cpp:598: [my-graph-id.42] [42:1] Checkpoint aborted 2026-02-19T08:04:48.096049Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2026-02-19T08:04:48.096070Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-02-19T08:04:48.096108Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:391: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2026-02-19T08:04:48.096125Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:437: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2026-02-19T08:04:48.195569Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-02-19T08:04:48.195645Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 0, ActorsToNotify count: 1, ActorsToWaitFor count: 2 2026-02-19T08:04:48.195677Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:107: [my-graph-id.42] No ingress tasks, coordinator was disabled 2026-02-19T08:04:48.195704Z node 6 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:447: [my-graph-id.42] Send TEvRun to all actors |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |89.0%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest |89.0%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ynks/004528/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit_log.7ing7tcw.txt 2026-02-19T08:06:46.422908Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/federated_query/actors/ut_service/fast/unittest >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] Test command err: 2026-02-19T08:04:41.676631Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608484232286697932:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:04:41.678612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:04:41.876757Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:04:41.891816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:04:41.891925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:04:41.939409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:04:41.958839Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:04:41.959729Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608484232286697900:2081] 1771488281674758 != 1771488281674761 2026-02-19T08:04:42.004181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:04:42.004205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:04:42.004211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:04:42.004295Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:04:42.108530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:04:42.339226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:04:42.371437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:42.484522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:42.604790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:42.657484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:42.752461Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:04:44.315012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608484245171601668:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.315108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.315331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608484245171601678:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.315378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.528482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.550335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.570843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.591585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.613126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.639018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.666261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.721512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:04:44.770493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608484245171602547:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.770557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.770558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608484245171602552:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.770653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608484245171602554:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.770674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:04:44.772847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:04:44.779075Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608484245171602555:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:04:44.843621Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608484245171602607:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:04:45.806320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-02-19T08:04:46.620502Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608484256698417737:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:04:46.620552Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; ... 5]; 2026-02-19T08:06:49.290004Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:06:49.319530Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:06:49.475164Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:06:49.475294Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:06:49.485964Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:7608484785199201214:2081] 1771488409284464 != 1771488409284467 2026-02-19T08:06:49.543354Z node 14 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:06:49.562755Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:06:49.631239Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:06:49.671704Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:06:49.671741Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:06:49.671753Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:06:49.671884Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:06:50.303358Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:06:50.608402Z node 14 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:06:50.621445Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:06:50.634126Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:50.755497Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:51.063570Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:51.195133Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:54.290701Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7608484785199201245:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:06:54.290803Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:06:56.196557Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7608484815263974191:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:56.196740Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:56.197509Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7608484815263974201:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:56.197584Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:56.313918Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:56.372128Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:56.426617Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:56.496332Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:56.588936Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:56.703259Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:57.076151Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:57.227780Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:06:57.421105Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7608484819558942390:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:57.421279Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:57.421745Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7608484819558942395:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:57.421775Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7608484819558942396:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:57.421845Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:06:57.430493Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:06:57.460278Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7608484819558942399:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:06:57.548878Z node 14 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [14:7608484819558942452:3792] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:07:01.574368Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |89.0%| [TM] {RESULT} ydb/core/kqp/federated_query/actors/ut_service/fast/unittest |89.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |89.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |89.1%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |89.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |89.1%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |89.1%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |89.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] |89.1%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |89.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |89.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |89.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTLAlterSettings::test_case >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_auditlog.py::test_single_dml_query_logged[insert] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_truncate_table_simple/ydb-core-tx-schemeshard-ut_truncate_table_simple |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_truncate_table_simple/ydb-core-tx-schemeshard-ut_truncate_table_simple |89.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_truncate_table_simple/ydb-core-tx-schemeshard-ut_truncate_table_simple >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[select] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ynks/004526/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit_log.rntphsyf.txt 2026-02-19T08:07:18.553533Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |89.1%| [LD] {RESULT} 
$(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut ld.lld: warning: contrib/tools/python3/libcontrib-tools-python3.a: archive member 'tsan.supp' is neither ET_REL nor LLVM bitcode |89.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |89.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |89.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TBsLocalRecovery::WriteRestartReadHuge [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> listing_paging.py::TestListingPaging::test_listing_paging_solomon [GOOD] >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ynks/004504/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit_log.r_51ptz9.txt 2026-02-19T08:07:51.380046Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-02-19T08:07:51.379996Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2026-02-19T08:07:51.302575Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ynks/004503/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit_log.o2i3_o0r.txt 2026-02-19T08:07:51.433206Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-02-19T08:07:51.433142Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from 
`/Root/test_auditlog.py/test-table`","start_time":"2026-02-19T08:07:51.190299Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] [GOOD] >> 
test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |89.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |89.2%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |89.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] |89.3%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |89.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] Test command err: diskMask# 431 nonWorkingDomain# 0 20640 diskMask# 431 nonWorkingDomain# 1 47544 diskMask# 432 nonWorkingDomain# 0 170904 diskMask# 432 nonWorkingDomain# 1 64800 diskMask# 433 nonWorkingDomain# 0 74784 diskMask# 433 nonWorkingDomain# 1 8640 diskMask# 434 nonWorkingDomain# 0 140712 diskMask# 434 nonWorkingDomain# 1 8640 diskMask# 435 nonWorkingDomain# 0 55140 diskMask# 435 nonWorkingDomain# 1 4320 diskMask# 436 nonWorkingDomain# 0 140712 diskMask# 436 nonWorkingDomain# 1 64800 diskMask# 437 nonWorkingDomain# 0 55140 diskMask# 437 nonWorkingDomain# 1 8640 diskMask# 438 nonWorkingDomain# 0 75096 diskMask# 438 nonWorkingDomain# 1 8640 diskMask# 439 nonWorkingDomain# 0 25890 diskMask# 439 nonWorkingDomain# 1 4320 diskMask# 440 nonWorkingDomain# 0 93024 diskMask# 440 nonWorkingDomain# 1 8640 diskMask# 441 nonWorkingDomain# 0 39096 diskMask# 441 nonWorkingDomain# 1 62544 diskMask# 442 nonWorkingDomain# 0 66300 diskMask# 442 nonWorkingDomain# 1 4320 diskMask# 443 nonWorkingDomain# 0 22680 diskMask# 443 nonWorkingDomain# 1 48192 diskMask# 444 nonWorkingDomain# 0 66300 diskMask# 444 nonWorkingDomain# 1 8640 diskMask# 445 nonWorkingDomain# 0 22680 diskMask# 445 nonWorkingDomain# 1 62544 |89.3%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |89.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |89.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |89.3%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |89.3%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |89.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |89.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> 
test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_borrowed_compaction/ydb-core-tx-datashard-ut_borrowed_compaction |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_borrowed_compaction/ydb-core-tx-datashard-ut_borrowed_compaction |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_borrowed_compaction/ydb-core-tx-datashard-ut_borrowed_compaction |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] >> HullReplWriteSst::Basic [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |89.4%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> HullReplWriteSst::Basic [GOOD] Test command err: commit chunk# 1 {ChunkIdx: 1 Offset: 101236736 Size: 32979900} 749541 commit chunk# 2 {ChunkIdx: 2 Offset: 101240832 Size: 32973916} 749405 commit chunk# 3 {ChunkIdx: 3 Offset: 101236736 Size: 32979944} 749542 commit chunk# 4 {ChunkIdx: 4 Offset: 101216256 Size: 33000184} 750002 commit chunk# 5 {ChunkIdx: 5 Offset: 101236736 Size: 32980956} 749565 commit chunk# 6 {ChunkIdx: 6 Offset: 101199872 Size: 33014484} 750327 commit chunk# 7 {ChunkIdx: 7 Offset: 101203968 Size: 33011096} 750250 commit chunk# 8 {ChunkIdx: 8 Offset: 101228544 Size: 32986192} 749684 commit chunk# 9 {ChunkIdx: 9 Offset: 101220352 Size: 32994024} 749862 commit chunk# 10 {ChunkIdx: 10 Offset: 101199872 Size: 33016156} 750365 commit chunk# 11 {ChunkIdx: 11 Offset: 101232640 Size: 32983728} 749628 commit chunk# 12 {ChunkIdx: 12 Offset: 101228544 Size: 32989184} 749752 commit chunk# 13 {ChunkIdx: 13 Offset: 101236736 Size: 32976996} 749475 commit chunk# 14 {ChunkIdx: 14 Offset: 101212160 Size: 33005552} 750124 commit chunk# 15 {ChunkIdx: 15 Offset: 101240832 Size: 32974180} 749411 commit chunk# 16 {ChunkIdx: 16 Offset: 101224448 Size: 32989316} 749755 commit chunk# 17 {ChunkIdx: 17 Offset: 101228544 Size: 32987864} 749722 commit chunk# 18 {ChunkIdx: 18 Offset: 101228544 Size: 32987820} 749721 commit chunk# 19 {ChunkIdx: 19 Offset: 101195776 Size: 33018884} 750427 commit chunk# 20 {ChunkIdx: 20 Offset: 101224448 Size: 32990240} 749776 commit chunk# 21 {ChunkIdx: 21 Offset: 101224448 Size: 32992924} 749837 commit chunk# 22 {ChunkIdx: 22 Offset: 101236736 Size: 32979636} 749535 commit chunk# 23 {ChunkIdx: 23 Offset: 101257216 Size: 32959924} 749087 commit chunk# 24 {ChunkIdx: 24 Offset: 101224448 Size: 32990592} 749784 
commit chunk# 25 {ChunkIdx: 25 Offset: 101236736 Size: 32980384} 749552 commit chunk# 26 {ChunkIdx: 26 Offset: 101191680 Size: 33025044} 750567 commit chunk# 27 {ChunkIdx: 27 Offset: 101208064 Size: 33006212} 750139 commit chunk# 28 {ChunkIdx: 28 Offset: 101220352 Size: 32997324} 749937 commit chunk# 29 {ChunkIdx: 29 Offset: 101257216 Size: 32958516} 749055 commit chunk# 30 {ChunkIdx: 30 Offset: 101216256 Size: 32998204} 749957 commit chunk# 31 {ChunkIdx: 31 Offset: 101208064 Size: 33009644} 750217 commit chunk# 32 {ChunkIdx: 32 Offset: 101208064 Size: 33009424} 750212 commit chunk# 33 {ChunkIdx: 33 Offset: 101244928 Size: 32969384} 749302 commit chunk# 34 {ChunkIdx: 34 Offset: 101236736 Size: 32980868} 749563 commit chunk# 35 {ChunkIdx: 35 Offset: 101236736 Size: 32980692} 749559 commit chunk# 36 {ChunkIdx: 36 Offset: 101203968 Size: 33011536} 750260 commit chunk# 37 {ChunkIdx: 37 Offset: 101216256 Size: 32997544} 749942 commit chunk# 38 {ChunkIdx: 38 Offset: 101220352 Size: 32995916} 749905 commit chunk# 39 {ChunkIdx: 39 Offset: 101240832 Size: 32974180} 749411 commit chunk# 40 {ChunkIdx: 40 Offset: 101261312 Size: 32953368} 748938 commit chunk# 41 {ChunkIdx: 41 Offset: 101220352 Size: 32995168} 749888 |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |89.4%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |89.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |89.4%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |89.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |89.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/select/py3test |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] >> TSubscriberSinglePathUpdateTest::TwoRingGroups >> TSubscriberTest::StrongNotificationAfterCommit >> TSubscriberSinglePathUpdateTest::TwoRingGroups [GOOD] >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch >> TSubscriberSyncQuorumTest::OneRingGroup >> TSubscriberTest::NotifyUpdate >> TSubscriberSyncQuorumTest::OneRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch [GOOD] >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::Sync >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] >> TSubscriberTest::NotifyUpdate [GOOD] >> TSubscriberTest::ReconnectOnFailure >> TSubscriberSinglePathUpdateTest::OneRingGroup >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup >> TSubscriberTest::Sync [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-02-19T08:08:57.660462Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-02-19T08:08:57.670511Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-02-19T08:08:57.670679Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-02-19T08:08:57.670733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-02-19T08:08:57.670794Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2026-02-19T08:08:57.670862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2026-02-19T08:08:57.670914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-02-19T08:08:57.671302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-02-19T08:08:57.671445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-02-19T08:08:57.671486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-02-19T08:08:57.671548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2026-02-19T08:08:57.671609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2026-02-19T08:08:57.671677Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:57.671738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-02-19T08:08:57.671811Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 
2026-02-19T08:08:57.672196Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2026-02-19T08:08:57.672289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:29:2075] 2026-02-19T08:08:57.672362Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Update to strong state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2026-02-19T08:08:57.672665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2026-02-19T08:08:57.672739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:30:2075] 2026-02-19T08:08:57.672800Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2026-02-19T08:08:57.673024Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:6:2053] 2026-02-19T08:08:57.673085Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:31:2075] 2026-02-19T08:08:57.673132Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:3298559222387:0] 2026-02-19T08:08:57.673372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:13:2060] 2026-02-19T08:08:57.673427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:32:2075] 2026-02-19T08:08:57.673492Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new 
version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:4398070850163:0] 2026-02-19T08:08:57.673822Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:14:2061] 2026-02-19T08:08:57.673883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:33:2075] 2026-02-19T08:08:57.673928Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:5497582477939:0] 2026-02-19T08:08:57.674161Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:15:2062] 2026-02-19T08:08:57.674209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:34:2075] 2026-02-19T08:08:57.674256Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 6) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-02-19T08:08:58.050200Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:58.050678Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [2:23:2066] 2026-02-19T08:08:58.050722Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:4:2051] Upsert description: path# TestPath 2026-02-1 ... 
Update { Owner: 1 Generation: 1 }: sender# [2:18:2065], cookie# 0, event size# 80 2026-02-19T08:08:58.062167Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:4:2051] Update description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-02-19T08:08:58.062229Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:4:2051] Upsert description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path TestPath, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 34} 2026-02-19T08:08:58.062358Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:4:2051] 2026-02-19T08:08:58.062432Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:23:2066] 2026-02-19T08:08:58.062543Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:520: [proxy][2:20:2066][TestPath] Cluster state mismatch in replica notification: sender# [2:23:2066], subscriber cluster state# {Generation: 0, GUID: 0}, replica cluster state# {Generation: 1 Guid: 0} 2026-02-19T08:08:58.062660Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:4:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: TestPath }: sender# [2:23:2066] 2026-02-19T08:08:58.062711Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:4:2051] Unsubscribe: subscriber# [2:23:2066], path# TestPath 2026-02-19T08:08:58.062768Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:20:2066] 2026-02-19T08:08:58.062827Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:19:2066][TestPath] Ignore empty state: owner# [2:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-02-19T08:08:58.420862Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:58.421650Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-02-19T08:08:58.421742Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-02-19T08:08:58.421791Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2026-02-19T08:08:58.438678Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2026-02-19T08:08:58.438837Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-02-19T08:08:58.438980Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-02-19T08:08:58.439060Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.439125Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-02-19T08:08:58.439173Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.439317Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2026-02-19T08:08:58.439413Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2026-02-19T08:08:58.439460Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2026-02-19T08:08:58.439542Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-02-19T08:08:58.439611Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-02-19T08:08:58.439768Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster 
State: { } }: sender# [3:29:2075], cookie# 12345 2026-02-19T08:08:58.439823Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:08:58.439875Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2026-02-19T08:08:58.439929Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:08:58.439997Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2026-02-19T08:08:58.440027Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2026-02-19T08:08:58.440074Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-02-19T08:08:58.440135Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:24339059:0] whose ring group state is: 0 2026-02-19T08:08:58.440268Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2026-02-19T08:08:58.440435Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2026-02-19T08:08:58.440515Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2026-02-19T08:08:58.440582Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2026-02-19T08:08:58.440649Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:08:58.440747Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12346 2026-02-19T08:08:58.440819Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2026-02-19T08:08:58.440854Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, 
ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-02-19T08:08:58.440895Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-02-19T08:08:58.440966Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-02-19T08:08:58.441031Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.441314Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2026-02-19T08:08:58.441369Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:30:2075], cookie# 12346, current cookie# 0 >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-02-19T08:08:58.088778Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:58.091031Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-02-19T08:08:58.091141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-02-19T08:08:58.091185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2026-02-19T08:08:58.091452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2026-02-19T08:08:58.091521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-02-19T08:08:58.091642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-02-19T08:08:58.091706Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.091764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-02-19T08:08:58.091822Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.091942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2026-02-19T08:08:58.092014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2026-02-19T08:08:58.092065Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2026-02-19T08:08:58.092181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-02-19T08:08:58.092236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-02-19T08:08:58.092379Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2026-02-19T08:08:58.092441Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:08:58.092488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2026-02-19T08:08:58.092533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:08:58.092588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2026-02-19T08:08:58.092624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2026-02-19T08:08:58.092672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-02-19T08:08:58.092720Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:24339059:0] whose ring group state is: 0 2026-02-19T08:08:58.092853Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2026-02-19T08:08:58.092982Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2026-02-19T08:08:58.093037Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2026-02-19T08:08:58.093102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2026-02-19T08:08:58.093156Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:08:58.093243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2026-02-19T08:08:58.093346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:20:2066], cookie# 12346 2026-02-19T08:08:58.093393Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-02-19T08:08:58.093427Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:19:2066][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-02-19T08:08:58.093498Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-02-19T08:08:58.093561Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# 
[1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.093616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12346 2026-02-19T08:08:58.093650Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:21:2066], cookie# 12346, current cookie# 0 ... waiting for initial path lookups 2026-02-19T08:08:58.401469Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[2:3298559222387:0], [2:4398070850163:0], [2:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-02-19T08:08:58.402569Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2026-02-19T08:08:58.402647Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2026-02-19T08:08:58.402689Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] 2026-02-19T08:08:58.402745Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:13:2060] 2026-02-19T08:08:58.402788Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:14:2061] 2026-02-19T08:08:58.402829Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr:: ... 
19T08:08:58.406858Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:30:2075] 2026-02-19T08:08:58.406926Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.406979Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:32:2075], cookie# 12346 2026-02-19T08:08:58.407017Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-02-19T08:08:58.407049Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 1 2026-02-19T08:08:58.407105Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:33:2075], cookie# 12346 2026-02-19T08:08:58.407135Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-02-19T08:08:58.407162Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 2, failures# 1, partial# 1 2026-02-19T08:08:58.407185Z node 2 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][2:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 ... waiting for initial path lookups 2026-02-19T08:08:58.800418Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2026-02-19T08:08:58.801138Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-02-19T08:08:58.801238Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-02-19T08:08:58.801286Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2026-02-19T08:08:58.801615Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2026-02-19T08:08:58.804711Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-02-19T08:08:58.804938Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-02-19T08:08:58.805044Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.805109Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-02-19T08:08:58.805173Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.805353Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2026-02-19T08:08:58.805431Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2026-02-19T08:08:58.805499Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2026-02-19T08:08:58.805625Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-02-19T08:08:58.805684Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-02-19T08:08:58.805850Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster 
State: { } }: sender# [3:29:2075], cookie# 12345 2026-02-19T08:08:58.805910Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:08:58.805969Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2026-02-19T08:08:58.806025Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:08:58.806088Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2026-02-19T08:08:58.806119Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2026-02-19T08:08:58.806169Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-02-19T08:08:58.806237Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:1099535966835:0] whose ring group state is: 0 2026-02-19T08:08:58.806381Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2026-02-19T08:08:58.806604Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2026-02-19T08:08:58.806698Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2026-02-19T08:08:58.806779Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2026-02-19T08:08:58.806825Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:08:58.806881Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12346 2026-02-19T08:08:58.806987Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2026-02-19T08:08:58.807017Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring 
group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-02-19T08:08:58.807052Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2026-02-19T08:08:58.807091Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-02-19T08:08:58.807130Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-02-19T08:08:58.813968Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-02-19T08:08:58.818085Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2026-02-19T08:08:58.350860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:58.352994Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-02-19T08:08:58.353104Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-02-19T08:08:58.353163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-02-19T08:08:58.353239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-02-19T08:08:58.353317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-02-19T08:08:58.353361Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.353453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-02-19T08:08:58.353512Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.359298Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] 
Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2026-02-19T08:08:58.359427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2026-02-19T08:08:58.359518Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.889184Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][4:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:58.890418Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-02-19T08:08:58.890504Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-02-19T08:08:58.890566Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-02-19T08:08:58.890693Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-02-19T08:08:58.890772Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-02-19T08:08:58.890823Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][4:36:2066][path] Set up state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.890896Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-02-19T08:08:58.890937Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.891388Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-02-19T08:08:58.891466Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.891510Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-02-19T08:08:58.891558Z node 4 
:SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.891602Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-02-19T08:08:58.891633Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.903087Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-02-19T08:08:58.903238Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-02-19T08:08:58.903313Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.903428Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:48:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-02-19T08:08:58.903522Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:49:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-02-19T08:08:58.903647Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-02-19T08:08:58.903693Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.903779Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-02-19T08:08:58.903815Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.904380Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2026-02-19T08:08:58.904486Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [4:37:2066] 
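The "Set up state" / "Ignore empty state" entries above, together with the "Update to strong state" entry that follows, trace how the subscriber merges incoming TEvNotify states: empty (Version 0) notifications are ignored once a state has been set up, and only a strong (versioned) state replaces it. Below is a minimal sketch of that decision rule, using hypothetical names (TPathState, Decide) rather than the real subscriber.cpp types; it is an illustration inferred from these log lines, not the actual implementation.

#include <cstdint>

// Hypothetical sketch of the state-merge rule the notifications above trace.
struct TPathState {
    bool Strong = false;       // true when the notify carried a PathId/Version
    uint64_t Version = 0;      // 0 when no version is known
};

enum class EApply { Ignore, UpdateToStrong, UpdateToNewVersion };

EApply Decide(const TPathState& current, const TPathState& incoming) {
    if (!incoming.Strong) {
        return EApply::Ignore;                 // "Ignore empty state"
    }
    if (!current.Strong) {
        return EApply::UpdateToStrong;         // "Update to strong state"
    }
    if (incoming.Version > current.Version) {
        return EApply::UpdateToNewVersion;     // "Path was updated to new version"
    }
    return EApply::Ignore;                     // "Path was already updated"
}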
2026-02-19T08:08:58.904554Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][4:36:2066][path] Update to strong state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2026-02-19T08:08:57.951268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:57.953371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-02-19T08:08:57.953473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-02-19T08:08:57.953514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-02-19T08:08:57.953585Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-02-19T08:08:57.953669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-02-19T08:08:57.954617Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:57.954758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-02-19T08:08:57.954825Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:57.955267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-02-19T08:08:57.955379Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-02-19T08:08:57.955464Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:57.955638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-02-19T08:08:57.955697Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-02-19T08:08:57.955737Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.762462Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:58.763147Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2026-02-19T08:08:58.763213Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:6:2053] 2026-02-19T08:08:58.763252Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:9:2056] 2026-02-19T08:08:58.763336Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:38:2067] 2026-02-19T08:08:58.763408Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:39:2067] 2026-02-19T08:08:58.763484Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.763600Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:40:2067] 2026-02-19T08:08:58.763650Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.763747Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2026-02-19T08:08:58.763848Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2026-02-19T08:08:58.763927Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2026-02-19T08:08:58.763982Z node 
3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2026-02-19T08:08:58.764100Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2026-02-19T08:08:58.764146Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2026-02-19T08:08:58.764177Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2026-02-19T08:08:58.764278Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2026-02-19T08:08:58.764327Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:08:58.764368Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2026-02-19T08:08:58.764399Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:08:58.764478Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2026-02-19T08:08:58.764516Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |89.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... 
waiting for initial path lookups 2026-02-19T08:08:58.916848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:58.919808Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-02-19T08:08:58.919939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-02-19T08:08:58.919982Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-02-19T08:08:58.920242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-02-19T08:08:58.920386Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-02-19T08:08:58.920438Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:58.920480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-02-19T08:08:58.920540Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 2026-02-19T08:08:58.920899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2026-02-19T08:08:58.920974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: 
sender# [1:20:2066] 2026-02-19T08:08:58.921035Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Update to strong state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2026-02-19T08:08:58.921340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2026-02-19T08:08:58.921398Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:21:2066] 2026-02-19T08:08:58.921462Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2026-02-19T08:08:58.921692Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:6:2053] 2026-02-19T08:08:58.925135Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:22:2066] 2026-02-19T08:08:58.925237Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-02-19T08:08:59.325111Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:08:59.325829Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2026-02-19T08:08:59.325893Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2026-02-19T08:08:59.325934Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-02-19T08:08:59.326170Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:29:2075] 2026-02-19T08:08:59.326274Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:30:2075] 2026-02-19T08:08:59.326327Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:28:2075][TestPath] Set up state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:59.326377Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:31:2075] 2026-02-19T08:08:59.326429Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:24339059:0] 2026-02-19T08:08:59.326702Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:4:2051] 2026-02-19T08:08:59.326774Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:29:2075] 2026-02-19T08:08:59.326830Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Update to strong state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:1099535966835:0] 2026-02-19T08:08:59.327186Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:5:2052] 2026-02-19T08:08:59.327249Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:30:2075] 2026-02-19T08:08:59.327295Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:2199047594611:0] 2026-02-19T08:08:59.327547Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [2:6:2053] 2026-02-19T08:08:59.327599Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [2:31:2075] 2026-02-19T08:08:59.327649Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:3298559222387:0] Sending path update to replica: [2:4398070850163:0] Sending path update to replica: [2:5497582477939:0] ... waiting for initial path lookups 2026-02-19T08:08:59.779574Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2026-02-19T08:08:59.780421Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-02-19T08:08:59.780515Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-02-19T08:08:59.780565Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-02-19T08:08:59.780834Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-02-19T08:08:59.780941Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-02-19T08:08:59.780990Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:08:59.781041Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-02-19T08:08:59.781084Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:24339059:0] 2026-02-19T08:08:59.781385Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:4:2051] 2026-02-19T08:08:59.781507Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:29:2075] 2026-02-19T08:08:59.781585Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Update to strong state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:1099535966835:0] 2026-02-19T08:08:59.786264Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:5:2052] 2026-02-19T08:08:59.786399Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:30:2075] 2026-02-19T08:08:59.786487Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:2199047594611:0] 2026-02-19T08:08:59.786898Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [3:6:2053] 2026-02-19T08:08:59.786980Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [3:31:2075] 2026-02-19T08:08:59.787029Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:3298559222387:0] Sending path update to replica: [3:4398070850163:0] Sending path update to replica: [3:5497582477939:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/ynks/0044fc/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit_log.ps2u2d_t.txt 2026-02-19T08:08:40.916078Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] >> TSubscriberCombinationsTest::CombinationsRootDomain >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg >> ResultFormatter::Pg [GOOD] >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] >> TSubscriberTest::SyncPartial |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration >> TSubscriberTest::SyncPartial [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2026-02-19T08:09:07.806113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:07.808306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-02-19T08:09:07.808410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-02-19T08:09:07.808454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-02-19T08:09:07.808519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-02-19T08:09:07.808597Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-02-19T08:09:07.808640Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:07.808726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-02-19T08:09:07.808776Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:07.809003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 1 2026-02-19T08:09:07.809152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2026-02-19T08:09:07.809231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2026-02-19T08:09:07.809315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 1 2026-02-19T08:09:07.809477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:6:2053], cookie# 1 2026-02-19T08:09:07.809535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 1 2026-02-19T08:09:07.809642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 1 2026-02-19T08:09:07.809686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:09:07.810234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-02-19T08:09:07.810309Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:07.810364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:38:2066], cookie# 1 2026-02-19T08:09:07.810410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-02-19T08:09:07.810461Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 1 
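The "Sync is in progress" / "Sync is done in the ring group" entries around this point spell out the quorum arithmetic per ring group: with size# 3 and half# 1, the sync completes once successes exceed half, and is reported incomplete once failures exceed half. A minimal sketch of that majority check follows, with hypothetical names (CheckQuorum, ESyncResult) standing in for the actual subscriber.cpp code; it only restates the arithmetic visible in these log lines.

#include <cstdint>

// Hypothetical sketch of the per-ring-group quorum check traced above.
enum class ESyncResult { InProgress, Done, Incomplete };

ESyncResult CheckQuorum(uint32_t size, uint32_t successes, uint32_t failures) {
    const uint32_t half = size / 2;
    if (successes > half) {
        return ESyncResult::Done;        // e.g. size# 3, half# 1, successes# 2
    }
    if (failures > half) {
        return ESyncResult::Incomplete;  // "Sync is incomplete in one of the ring groups"
    }
    return ESyncResult::InProgress;      // e.g. successes# 1, failures# 1
}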
2026-02-19T08:09:07.810521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:36:2066][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-02-19T08:09:07.810636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 2 2026-02-19T08:09:07.810766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 2 2026-02-19T08:09:07.810796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:09:07.810838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 2 2026-02-19T08:09:07.810909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 2 2026-02-19T08:09:07.811024Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 2 2026-02-19T08:09:07.811095Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 2 2026-02-19T08:09:07.811131Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-02-19T08:09:07.811163Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 2 2026-02-19T08:09:07.811255Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-02-19T08:09:07.811296Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:07.811358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 2 2026-02-19T08:09:07.811385Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 2, current cookie# 0 2026-02-19T08:09:07.811478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 3 2026-02-19T08:09:07.811576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], 
cookie# 3 2026-02-19T08:09:07.811600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:09:07.811640Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 3 2026-02-19T08:09:07.811678Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-02-19T08:09:07.811708Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 3 2026-02-19T08:09:07.811744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 3 2026-02-19T08:09:07.811919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:39:2066], cookie# 3 2026-02-19T08:09:07.811945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 3, current cookie# 0 2026-02-19T08:09:07.811979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-02-19T08:09:07.812016Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:08.327400Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:08.328051Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:3:2050] 2026-02-19T08:09:08.328117Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:6:2053] 2026-02-19T08:09:08.328158Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:9:2056] 2026-02-19T08:09:08.328221Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:38:2067] 2026-02-19T08:09:08.328272Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:39:2067] 2026-02-19T08:09:08.328335Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: 
owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:08.328678Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:40:2067] 2026-02-19T08:09:08.328750Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:08.328877Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2026-02-19T08:09:08.328976Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2026-02-19T08:09:08.329044Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2026-02-19T08:09:08.329097Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2026-02-19T08:09:08.329199Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2026-02-19T08:09:08.329251Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2026-02-19T08:09:08.329287Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2026-02-19T08:09:08.329340Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2026-02-19T08:09:08.329385Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:09:08.329429Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2026-02-19T08:09:08.329466Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:09:08.329525Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2026-02-19T08:09:08.329553Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |89.5%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] Test command err: ... waiting for initial path lookups 2026-02-19T08:09:09.473101Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:09.474999Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:23:2066] 2026-02-19T08:09:09.475055Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:4:2051] Upsert description: path# TestPath 2026-02-19T08:09:09.475194Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:4:2051] Subscribe: subscriber# [1:23:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:09.475384Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:24:2066] 2026-02-19T08:09:09.475409Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:5:2052] Upsert description: path# TestPath 2026-02-19T08:09:09.475447Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:5:2052] Subscribe: subscriber# [1:24:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:09.475569Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:25:2066] 2026-02-19T08:09:09.475601Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# TestPath 2026-02-19T08:09:09.475635Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:25:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:09.475694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-02-19T08:09:09.475754Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:23:2066] 2026-02-19T08:09:09.475806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-02-19T08:09:09.475851Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:24:2066] 2026-02-19T08:09:09.475882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-02-19T08:09:09.475914Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:25:2066] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2026-02-19T08:09:09.476192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2026-02-19T08:09:09.476261Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-02-19T08:09:09.476370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-02-19T08:09:09.476426Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:09.476509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-02-19T08:09:09.476566Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:09.476666Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2026-02-19T08:09:09.476712Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12345 2026-02-19T08:09:09.476762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2026-02-19T08:09:09.476793Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12345 2026-02-19T08:09:09.476824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2026-02-19T08:09:09.476924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-02-19T08:09:09.476988Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-02-19T08:09:09.477132Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2026-02-19T08:09:09.477196Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:09:09.477266Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2026-02-19T08:09:09.477315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:09:09.477381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2026-02-19T08:09:09.477412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2026-02-19T08:09:09.477470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-02-19T08:09:09.477522Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Updating cluster state generation on replica: [1:1099535966835:0] 2026-02-19T08:09:09.477637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2026-02-19T08:09:09.477775Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:1240: 
[1:5:2052] Handle {EvUpdateGroupConfig GroupConfig: empty BoardConfig: empty SchemeBoardConfig: {RingGroups# [0:{NToSelect# 3 Rings# [0:{[[1:24339059:0]]} 1:{[[1:1099535966835:0]]} 2:{[[1:2199047594611:0]]}}] StateStorageVersion# 0 CompatibleVersions# [] ClusterStateGeneration# 1 ClusterStateGuid# 0}} 2026-02-19T08:09:09.477844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2026-02-19T08:09:09.477888Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12346 2026-02-19T08:09:09.477928Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2026-02-19T08:09:09.477955Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12346 2026-02-19T08:09:09.478005Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2026-02-19T08:09:09.478058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:09:09.478112Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12346 2026-02-19T08:09:09.478191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { Generation: 1 Guid: 0 } }: sender# [1:5:2052], cookie# 12346 2026-02-19T08:09:09.478239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12346 2026-02-19T08:09:09.478264Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-02-19T08:09:09.478313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersio ... [TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:10.097419Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-02-19T08:09:10.097485Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-02-19T08:09:10.097525Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-02-19T08:09:10.098560Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:18:2065], cookie# 12345 ... waiting for some sync responses 2026-02-19T08:09:10.098649Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:20:2066] 2026-02-19T08:09:10.098735Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:21:2066] 2026-02-19T08:09:10.098807Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:10.098880Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:22:2066] 2026-02-19T08:09:10.098942Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:10.099053Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:20:2066], cookie# 12345 2026-02-19T08:09:10.099127Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:21:2066], cookie# 12345 2026-02-19T08:09:10.099169Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:22:2066], cookie# 12345 2026-02-19T08:09:10.099260Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-02-19T08:09:10.099334Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 
2026-02-19T08:09:10.099380Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... waiting for some sync responses (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-02-19T08:09:10.099678Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:20:2066], cookie# 12345 2026-02-19T08:09:10.099745Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:09:10.099902Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:10.099946Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:1041: [main][3:19:2066][TestPath] Delay current sync request: 12345 ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-02-19T08:09:10.100524Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:21:2066], cookie# 12345 2026-02-19T08:09:10.100574Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:21:2066], cookie# 12345, current cookie# 0 2026-02-19T08:09:10.100619Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:22:2066], cookie# 12345 2026-02-19T08:09:10.100643Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:22:2066], cookie# 12345, current cookie# 0 2026-02-19T08:09:10.101140Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-02-19T08:09:10.101237Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-02-19T08:09:10.101303Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] 2026-02-19T08:09:10.101370Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:27:2066] 2026-02-19T08:09:10.101433Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:28:2066] 2026-02-19T08:09:10.101500Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:10.101553Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2066] 2026-02-19T08:09:10.101599Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:10.101684Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:27:2066], cookie# 12345 2026-02-19T08:09:10.102218Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:28:2066], cookie# 12345 2026-02-19T08:09:10.102308Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2066], cookie# 12345 2026-02-19T08:09:10.102416Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-02-19T08:09:10.102481Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-02-19T08:09:10.102526Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 2026-02-19T08:09:10.102595Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:27:2066], cookie# 12345 2026-02-19T08:09:10.102637Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:09:10.102705Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:28:2066], cookie# 12345 2026-02-19T08:09:10.102745Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:09:10.102806Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2066], cookie# 12345 2026-02-19T08:09:10.102840Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:29:2066], cookie# 12345, current cookie# 0 >> TSchemeShardServerLess::StorageBillingLabels >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] >> TSchemeShardServerLess::TestServerlessComputeResourcesMode |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact >> TBsLocalRecovery::WriteRestartReadHugeIncreased [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> 
TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:09:11.820402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:09:11.820484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:11.820542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:09:11.820603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:09:11.820639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:09:11.820673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:09:11.820716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:09:11.820740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:09:11.820808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:11.820874Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:09:11.821630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:09:11.821932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:09:11.907352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:11.907416Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:11.919577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:09:11.920274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:09:11.920489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:09:11.931832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:09:11.932499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:09:11.933241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:11.933573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:09:11.936494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:11.936669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:09:11.937917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:11.937974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:11.938077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:09:11.938118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:09:11.938229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:09:11.938409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:09:12.102258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.103975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.104033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.104117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.104200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.104269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
9549, txId: 281474976725658 2026-02-19T08:09:13.437335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186234409549, txId: 281474976725658, pathId: [OwnerId: 72075186234409549, LocalPathId: 1], version: 6 2026-02-19T08:09:13.437362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186234409549, LocalPathId: 1] was 5 2026-02-19T08:09:13.437404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976725658, ready parts: 0/1, is published: true 2026-02-19T08:09:13.441167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 2026-02-19T08:09:13.441317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2026-02-19T08:09:13.441450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-02-19T08:09:13.441813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 2026-02-19T08:09:13.441958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:13.442082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:09:13.442129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-02-19T08:09:13.442431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:09:13.442502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-02-19T08:09:13.442638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:09:13.442756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1057:2984], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, 
TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:09:13.443061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725658 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:09:13.445486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:13.445526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:09:13.445662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:13.445747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-02-19T08:09:13.446050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:09:13.446125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2026-02-19T08:09:13.446174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Done -> Done 2026-02-19T08:09:13.446776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:09:13.446852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:09:13.446908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:09:13.446944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:09:13.446974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-02-19T08:09:13.447063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-02-19T08:09:13.452193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:09:13.452260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:09:13.452369Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:09:13.452403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:09:13.452457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:09:13.452493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:09:13.452552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-02-19T08:09:13.452592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:09:13.452635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:09:13.452665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:09:13.452857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:09:13.475887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:09:13.482782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:09:13.482848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:09:13.483278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:09:13.483386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:09:13.483425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1264:3141] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-02-19T08:09:13.485550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:09:13.485692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2026-02-19T08:09:13.485837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2026-02-19T08:09:13.485988Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2026-02-19T08:09:13.486033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2026-02-19T08:09:13.493290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:09:13.493989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 >> TSchemeShardServerLess::StorageBilling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:09:11.842266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:09:11.842375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:11.842456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:09:11.842498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:09:11.842535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:09:11.842577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:09:11.842602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:09:11.842631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:09:11.842726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:11.842809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:09:11.843644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:09:11.843952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:09:11.919499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:11.919562Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:11.930613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:09:11.934103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:09:11.934294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:09:11.947466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:09:11.948333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:09:11.949033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:11.949362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:09:11.962726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:11.962933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:09:11.964191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:11.964261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:11.964369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:09:11.964415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:09:11.964512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:09:11.964717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:09:12.116128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.117972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.118060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:12.118141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
44, message: Origin: 72075186233409546 TxId: 106 2026-02-19T08:09:13.658013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:795: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2026-02-19T08:09:13.658066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 106:0 SyncHive -> Done 2026-02-19T08:09:13.660208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72075186234409549, cookie: 0 2026-02-19T08:09:13.660331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:09:13.660398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:09:13.662716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:09:13.662862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:09:13.662893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 106:0 ProgressState 2026-02-19T08:09:13.662988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:09:13.663018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:09:13.663048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:09:13.663071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:09:13.663098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-02-19T08:09:13.663137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:09:13.663167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-02-19T08:09:13.663189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 106:0 2026-02-19T08:09:13.663247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-02-19T08:09:13.668328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-02-19T08:09:13.668383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-02-19T08:09:13.668762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at 
schemeshard: 72057594046678944 2026-02-19T08:09:13.668837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:09:13.668867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1348:3225] TestWaitNotification: OK eventTxId 106 2026-02-19T08:09:13.669363Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:09:13.669534Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 177us result status StatusSuccess 2026-02-19T08:09:13.669877Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:09:13.670363Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2026-02-19T08:09:13.670496Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 135us result status StatusSuccess 2026-02-19T08:09:13.670810Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409549 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976725657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2026-02-19T08:09:13.671304Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:09:13.671475Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 168us result status StatusSuccess 2026-02-19T08:09:13.671704Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 
72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:09:13.672192Z node 1 :HIVE INFO: tablet_helpers.cpp:1652: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:39 >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitBefore |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:09:13.236005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:09:13.236077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:13.236148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, 
ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:09:13.236209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:09:13.236252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:09:13.236289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:09:13.236324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:09:13.236353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:09:13.236424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:13.236501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:09:13.237162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:09:13.237418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:09:13.348928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:13.348972Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:13.365467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:09:13.366996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:09:13.367203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:09:13.381401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:09:13.383356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:09:13.384136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:13.384459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:09:13.387850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:13.388022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:09:13.389211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:13.389284Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:13.389430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:09:13.389476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:09:13.389579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:09:13.389802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:09:13.578716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.579857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.579996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:13.580930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 5, at schemeshard: 72057594046678944 2026-02-19T08:09:15.616061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:09:15.617771Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 Forgetting tablet 72075186234409551 2026-02-19T08:09:15.617928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2026-02-19T08:09:15.618085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 Forgetting tablet 72075186234409550 2026-02-19T08:09:15.619646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-02-19T08:09:15.619774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:09:15.620753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:09:15.620798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:09:15.620882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:09:15.621230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:09:15.621444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:09:15.621491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:09:15.621566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:09:15.625675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:09:15.625741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409549 2026-02-19T08:09:15.625808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:7 2026-02-19T08:09:15.625833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2026-02-19T08:09:15.626285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:09:15.626334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2026-02-19T08:09:15.628017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:09:15.628089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-02-19T08:09:15.628424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-02-19T08:09:15.628465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-02-19T08:09:15.628937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:09:15.629023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:09:15.638868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1655:3511] TestWaitNotification: OK eventTxId 106 2026-02-19T08:09:15.640236Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:09:15.640561Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 317us result status StatusPathDoesNotExist 2026-02-19T08:09:15.640769Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } 
PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:09:15.641460Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:09:15.641627Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 175us result status StatusPathDoesNotExist 2026-02-19T08:09:15.641774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:09:15.642309Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:09:15.644239Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess 2026-02-19T08:09:15.644792Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SharedDB" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2026-02-19T08:09:15.645545Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2026-02-19T08:09:15.645659Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2026-02-19T08:09:15.645725Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2026-02-19T08:09:15.645807Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:09:13.974365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:09:13.974438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:13.974481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:09:13.974523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:09:13.974555Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:09:13.974582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:09:13.974603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:09:13.974697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:09:13.974755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:13.974822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:09:13.975458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:09:13.975700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:09:14.078607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:14.078666Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:14.090974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:09:14.096919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:09:14.097157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:09:14.127270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:09:14.130083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:09:14.130859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:14.131206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:09:14.135398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:14.135588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:09:14.136703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:14.136764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:14.136878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:09:14.136921Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:09:14.137014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:09:14.137216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:09:14.301346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.303753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.303921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.304922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 28147 ... dLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-02-19T08:09:16.344475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2026-02-19T08:09:16.344705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 Forgetting tablet 72075186234409546 Forgetting tablet 72075186234409548 2026-02-19T08:09:16.350310Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-02-19T08:09:16.350465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186234409547 2026-02-19T08:09:16.351785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-02-19T08:09:16.352037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:09:16.354152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:09:16.354216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:09:16.354363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:09:16.354873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:09:16.354918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:09:16.354991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:09:16.370376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:09:16.370464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2026-02-19T08:09:16.370697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:7 2026-02-19T08:09:16.370726Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2026-02-19T08:09:16.370779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:09:16.370820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2026-02-19T08:09:16.371203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:09:16.371320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-02-19T08:09:16.371632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-02-19T08:09:16.371677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-02-19T08:09:16.372068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:09:16.372160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:09:16.372194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1744:3599] TestWaitNotification: OK eventTxId 106 2026-02-19T08:09:16.372723Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:09:16.372927Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 237us result status StatusPathDoesNotExist 2026-02-19T08:09:16.373081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:09:16.373970Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:09:16.374128Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 180us result status StatusPathDoesNotExist 2026-02-19T08:09:16.374251Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:09:16.374693Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:09:16.374855Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 178us result status StatusSuccess 2026-02-19T08:09:16.375253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SharedDB" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: 
"/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2026-02-19T08:09:16.375891Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2026-02-19T08:09:16.375979Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2026-02-19T08:09:16.376025Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2026-02-19T08:09:16.376068Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 >> DataShardReadTableSnapshots::ReadTableSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:09:14.335960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:09:14.336044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:14.336102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:09:14.336140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:09:14.336197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:09:14.336235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:09:14.336265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:09:14.336294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for 
table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:09:14.336371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:14.336449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:09:14.337283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:09:14.337569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:09:14.430268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:14.430318Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:14.442340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:09:14.443691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:09:14.443904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:09:14.465546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:09:14.466697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:09:14.467466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:14.467788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:09:14.471184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:14.471356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:09:14.472531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:14.472597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:14.472707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:09:14.472755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:09:14.472857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:09:14.473043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:09:14.624343Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.625478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.625613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.625760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.625847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.625918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.626000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.626074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.626181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.626254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.626323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.626411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.626495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:14.626558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2026-02-19T08:09:16.621187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2816] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_permissions' 2026-02-19T08:09:16.621243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-02-19T08:09:16.621316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2816] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_minute' 2026-02-19T08:09:16.621346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:867:2816] at schemeshard: 72075186233409546 Send TEvRosterUpdateFinished 2026-02-19T08:09:16.622439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-02-19T08:09:16.622536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-02-19T08:09:16.622619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:09:16.622668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1339:3215] 2026-02-19T08:09:16.622881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725675 2026-02-19T08:09:16.623011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/top_queries_by_request_units_one_hour' 2026-02-19T08:09:16.623052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725670 2026-02-19T08:09:16.623125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/nodes' 2026-02-19T08:09:16.623177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725674 2026-02-19T08:09:16.623281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/top_partitions_by_tli_one_hour' 2026-02-19T08:09:16.623323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725669 2026-02-19T08:09:16.623403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: 
SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/resource_pool_classifiers' 2026-02-19T08:09:16.623454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725673 2026-02-19T08:09:16.623539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/top_queries_by_cpu_time_one_minute' 2026-02-19T08:09:16.623572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725668 2026-02-19T08:09:16.623630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/top_queries_by_read_bytes_one_minute' 2026-02-19T08:09:16.633035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725660 2026-02-19T08:09:16.633186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725665 2026-02-19T08:09:16.633250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725666 2026-02-19T08:09:16.633306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725667 2026-02-19T08:09:16.633383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725672 2026-02-19T08:09:16.633453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725672 2026-02-19T08:09:16.633531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/top_partitions_by_tli_one_minute' 2026-02-19T08:09:16.633563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725667 2026-02-19T08:09:16.633669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/top_queries_by_request_units_one_minute' 2026-02-19T08:09:16.633739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725666 2026-02-19T08:09:16.633814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle 
TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/compile_cache_queries' 2026-02-19T08:09:16.642036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725665 2026-02-19T08:09:16.642240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/top_queries_by_duration_one_hour' 2026-02-19T08:09:16.642327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 281474976725660 2026-02-19T08:09:16.642408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/ServerlessDB/.sys/tables' 2026-02-19T08:09:16.642450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:1129:3035] at schemeshard: 72075186233409549 Send TEvRosterUpdateFinished TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-02-19T08:09:16.646725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeTryKeepInMemory } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeRegular } } } } TxId: 108 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2026-02-19T08:09:16.646972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 108:0, at schemeshard: 72075186233409549 2026-02-19T08:09:16.647386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: , at schemeshard: 72075186233409549 2026-02-19T08:09:16.649908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: " TxId: 108 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2026-02-19T08:09:16.650149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 0 name: , operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2026-02-19T08:09:16.652826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeRegular } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeTryKeepInMemory } } } } TxId: 109 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2026-02-19T08:09:16.653031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 109:0, at schemeshard: 72075186233409549 2026-02-19T08:09:16.653351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other, at schemeshard: 72075186233409549 2026-02-19T08:09:16.656983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other" TxId: 109 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2026-02-19T08:09:16.657253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 1 name: Other, operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 109, wait until txId: 109 >> DataShardReadTableSnapshots::ReadTableDropColumn |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> TSubscriberSyncQuorumTest::TwoRingGroups >> TSubscriberSyncQuorumTest::TwoRingGroups [GOOD] >> TSubscriberTest::Boot |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> TSubscriberTest::Boot [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] Test command err: 2026-02-19T08:09:08.577663Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485378941033187:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:08.577734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:09:09.012317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:09.047246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:09.047378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:09.132833Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485378941033150:2081] 1771488548575155 != 1771488548575158 2026-02-19T08:09:09.148033Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:09.151358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:09.200263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:09.485310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:09.494085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:09:09.590928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:10.512066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:13.581851Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485378941033187:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:13.581923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:09:13.725411Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7608485400415870669:2600], ActorId: [1:7608485400415870670:2600], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=NGJmM2I2OTEtZjVhNTg0N2MtMzQ1N2JkYjEtZDg0NmVmNWY=, TxId: 01khtf4aqf8btffrv1qerr7ghj 2026-02-19T08:09:13.728186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485400415870691:2334], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.728265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.728887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485400415870702:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.729199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> TSubscriberTest::NotifyDelete >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: ... waiting for initial path lookups 2026-02-19T08:09:20.573476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-02-19T08:09:20.576192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-02-19T08:09:20.576287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-02-19T08:09:20.576316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-02-19T08:09:20.576346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2026-02-19T08:09:20.576418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2026-02-19T08:09:20.576461Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] Poisoning replica: [1:5497582477939:0] 2026-02-19T08:09:20.576801Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12345 2026-02-19T08:09:20.576876Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-02-19T08:09:20.577011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-02-19T08:09:20.577038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-02-19T08:09:20.577079Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2026-02-19T08:09:20.577113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2026-02-19T08:09:20.577238Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:20.577305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-02-19T08:09:20.577374Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:20.577509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12345 2026-02-19T08:09:20.577572Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12345 2026-02-19T08:09:20.577630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:31:2075], cookie# 12345 2026-02-19T08:09:20.577674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12345 2026-02-19T08:09:20.577743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12345 2026-02-19T08:09:20.577810Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: subscriber.cpp:375: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:34:2075], cookie# 12345 2026-02-19T08:09:20.577917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-02-19T08:09:20.577956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-02-19T08:09:20.578006Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12345 2026-02-19T08:09:20.578049Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12345 2026-02-19T08:09:20.578217Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12345 2026-02-19T08:09:20.578271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:09:20.578329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:30:2075], cookie# 12345 2026-02-19T08:09:20.578356Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:09:20.578378Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2026-02-19T08:09:20.578401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12345 2026-02-19T08:09:20.578419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-02-19T08:09:20.578434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2026-02-19T08:09:20.578470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-02-19T08:09:20.578520Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
2026-02-19T08:09:20.578567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12345 2026-02-19T08:09:20.578595Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-02-19T08:09:20.578628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:09:20.578674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12345 2026-02-19T08:09:20.578701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-02-19T08:09:20.578724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:09:20.578785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12345 2026-02-19T08:09:20.578814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:28:2075][TestPath] Sync cookie mismatch: sender# [1:34:2075], cookie# 12345, current cookie# 0 2026-02-19T08:09:20.578882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-02-19T08:09:20.578959Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:24339059:0] whose ring group state is: 0 2026-02-19T08:09:20.579096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12346 2026-02-19T08:09:20.579260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12346 2026-02-19T08:09:20.579331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12346 2026-02-19T08:09:20.579382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12346 2026-02-19T08:09:20.579412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, 
failures# 1 2026-02-19T08:09:20.579437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12346 2026-02-19T08:09:20.579476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12346 2026-02-19T08:09:20.579532Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12346 2026-02-19T08:09:20.579571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:09:20.579655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2026-02-19T08:09:20.579684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12346 2026-02-19T08:09:20.579703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12346 2026-02-19T08:09:20.579749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:29:2075], cookie# 12346 2026-02-19T08:09:20.579790Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-02-19T08:09:20.579834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 1 2026-02-19T08:09:20.579874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-02-19T08:09:20.579944Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:20.581390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:30:2075], cookie# 12346 2026-02-19T08:09:20.581440Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-02-19T08:09:20.581468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, 
half# 1, successes# 0, failures# 1 2026-02-19T08:09:20.581503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12346 2026-02-19T08:09:20.581543Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-02-19T08:09:20.581559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 1 2026-02-19T08:09:20.581590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12346 2026-02-19T08:09:20.581618Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-02-19T08:09:20.581642Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 2, failures# 1, partial# 1 2026-02-19T08:09:20.581665Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-02-19T08:09:20.910255Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:20.910800Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:3:2050] 2026-02-19T08:09:20.910862Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:6:2053] 2026-02-19T08:09:20.910889Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:9:2056] 2026-02-19T08:09:20.910970Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2066] 2026-02-19T08:09:20.911045Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:38:2066] 2026-02-19T08:09:20.911081Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:36:2066][path] Set up state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:20.911126Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:39:2066] 2026-02-19T08:09:20.911155Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:36:2066][path] Ignore empty state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 
Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> TSubscriberTest::NotifyDelete [GOOD] >> TSubscriberTest::InvalidNotification |89.5%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished >> TSubscriberTest::InvalidNotification [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] Test command err: 2026-02-19T08:09:08.580093Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485381801030960:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:08.582184Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:09:09.005197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:09.056061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:09.056189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:09.127990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485381801030918:2081] 1771488548574684 != 1771488548574687 2026-02-19T08:09:09.139192Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:09.145144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:09.174820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:09.469420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:09.604695Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:10.502138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:13.582099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485381801030960:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:13.582170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:09:13.723078Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7608485403275868435:2600], ActorId: [1:7608485403275868436:2600], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=MmVmYjE1YzctOTc1OWZkYWEtNDBjNzVhMTItODE3ZjUxMjA=, TxId: 01khtf4aqb6y9aedjvpw04qmbk 2026-02-19T08:09:13.726378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485403275868457:2334], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.726467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.726871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485403275868470:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.726922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.914779Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485401821861508:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:13.914812Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:09:13.938657Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:14.103164Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:14.103224Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:14.103298Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:14.105374Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485401821861470:2081] 1771488553912913 != 1771488553912916 2026-02-19T08:09:14.128753Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:14.219546Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:14.923097Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:15.047635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:16.066494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] Test command err: 2026-02-19T08:09:08.600343Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485382243882119:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:08.600411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:09:09.049905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:09.158207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:09.158291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:09.253335Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:09.258106Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485382243881871:2081] 1771488548580909 != 1771488548580912 2026-02-19T08:09:09.267067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:09.273793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:09.592211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:09.601976Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:09.606490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:10.626967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:10.866229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:13.600107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485382243882119:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:13.600189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:09:13.993326Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485402092198503:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:13.993368Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:09:14.071280Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:14.212806Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:14.212886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:14.239279Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:14.249383Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:14.349070Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:15.048010Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:15.064627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:16.075269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:17.409592Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7608485419272068662:2598], ActorId: [2:7608485419272068663:2598], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=MWRkMTJlODItOWJkOTAzNjMtMzA5N2M2ZTItODY5NDU5MGU=, TxId: 01khtf4eab6ep1qmewyfk92025 2026-02-19T08:09:17.411854Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608485419272068684:2331], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:17.411965Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:17.412256Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608485419272068697:2332], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:17.412311Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:09:20.601957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:09:20.602217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:20.602295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:09:20.602335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:09:20.602372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:09:20.602416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:09:20.602451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:09:20.602480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:09:20.602555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:20.602672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:09:20.603626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:09:20.603974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:09:20.693001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:20.693072Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:20.710025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:09:20.710954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:09:20.711170Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:09:20.728886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:09:20.729891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:09:20.730581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:20.730977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:09:20.736975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:20.737200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:09:20.738685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:20.738769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:20.738887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:09:20.738940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:09:20.739063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:09:20.739276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:09:20.899209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.900298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.900421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:09:20.900510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.900646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.900720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.900787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.900870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.900968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.901052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.901157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.901242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.901339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:20.901438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 22.097610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:09:22.097685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-02-19T08:09:22.097812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:09:22.097908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1057:2984], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:09:22.100053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:22.100098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:09:22.100272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:22.100311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-02-19T08:09:22.100641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:09:22.100743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2026-02-19T08:09:22.100794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Done -> Done 2026-02-19T08:09:22.101437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:09:22.101530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:09:22.101563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:09:22.101596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:09:22.101825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-02-19T08:09:22.101910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-02-19T08:09:22.104528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:09:22.104590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:09:22.104694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:09:22.104727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:09:22.104761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:09:22.104805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:09:22.104843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-02-19T08:09:22.104878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:09:22.104913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 
2026-02-19T08:09:22.104940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:09:22.105120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:09:22.105770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:09:22.107300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:09:22.107341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:09:22.107756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:09:22.107861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:09:22.107912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1264:3141] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-02-19T08:09:22.110687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:09:22.110843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2026-02-19T08:09:22.110883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2026-02-19T08:09:22.111038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2026-02-19T08:09:22.111077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2026-02-19T08:09:22.113289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:09:22.113487Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2026-02-19T08:09:22.115867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:09:22.116009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2026-02-19T08:09:22.116048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2026-02-19T08:09:22.116198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2026-02-19T08:09:22.116245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2026-02-19T08:09:22.118632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:09:22.118839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2026-02-19T08:09:21.843926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:21.845950Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle 
NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2026-02-19T08:09:21.846077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2026-02-19T08:09:21.846156Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2026-02-19T08:09:21.846216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2067] 2026-02-19T08:09:21.846256Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:39:2067] 2026-02-19T08:09:21.846319Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:37:2067][path] Set up state: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:21.846455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:40:2067] 2026-02-19T08:09:21.846514Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:21.846812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2026-02-19T08:09:21.846878Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2026-02-19T08:09:21.846923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2067] 2026-02-19T08:09:21.846979Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:37:2067][path] Path was updated to new version: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:21.847068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:39:2067] 2026-02-19T08:09:21.847110Z node 1 
:SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:22.316198Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:22.316884Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-02-19T08:09:22.316951Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-02-19T08:09:22.316995Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-02-19T08:09:22.317059Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2066] 2026-02-19T08:09:22.317128Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:38:2066] 2026-02-19T08:09:22.317163Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:36:2066][path] Set up state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:22.317258Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:39:2066] 2026-02-19T08:09:22.317307Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:36:2066][path] Ignore empty state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:22.317450Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] 2026-02-19T08:09:22.317496Z node 3 :SCHEME_BOARD_SUBSCRIBER ERROR: subscriber.cpp:818: [main][3:36:2066][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] Test command err: 2026-02-19T08:09:08.583116Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485380051228143:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:08.583161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-02-19T08:09:09.017838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:09.093068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:09.099427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:09.104264Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:09.106426Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485380051228105:2081] 1771488548574689 != 1771488548574692 2026-02-19T08:09:09.152785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:09.221788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:09.471654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:09.608266Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:10.502218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:13.585769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485380051228143:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:13.585826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:09:13.723702Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7608485401526065623:2600], ActorId: [1:7608485401526065624:2600], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=ODdhODY4MjctYjYxNDQzNmEtOTA4MjY0YzEtNDQ5ZTE1NDQ=, TxId: 01khtf4aqeeeypa9k8aw55mha2 2026-02-19T08:09:13.730129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485401526065646:2334], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.730302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.732638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485401526065659:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.732712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:13.960409Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485402374656724:2166];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:13.960509Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:09:14.011767Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:14.173590Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:14.175115Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485402374656586:2081] 1771488553941689 != 1771488553941692 2026-02-19T08:09:14.175426Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:14.175531Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:14.215960Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:14.313971Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:14.998085Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:15.065749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:16.077775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:17.479757Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7608485419554526792:2594], ActorId: [2:7608485419554526793:2594], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=NmY0MDJkMi03NDA2YTczLWU3ZmQ3MTZlLWYyNDI3MzZh, TxId: 01khtf4ecy6x5a94m1t1k6q9t1 2026-02-19T08:09:17.482645Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608485419554526815:2331], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:17.483078Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:17.483866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608485419554526828:2332], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:09:17.483914Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |89.5%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] Test command err: 2026-02-19T08:09:10.435901Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485389116207022:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:10.436546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:09:11.010850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:11.010958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:11.125245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:11.125347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:11.198863Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:11.207510Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485389116206981:2081] 1771488550370931 != 1771488550370934 2026-02-19T08:09:11.422480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:11.442101Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:11.538715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:11.546115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:12.580015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:15.426059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485389116207022:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:15.426122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:09:15.570254Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485410233400868:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:09:15.571863Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:09:15.669454Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:09:15.737399Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:15.740031Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485410233400815:2081] 1771488555555335 != 1771488555555338 2026-02-19T08:09:15.762796Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:15.762893Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:15.786534Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:15.851751Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:09:16.574903Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:16.646226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:16.656982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:09:17.664607Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:17.786191Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID >> test_analyze.py::test_basic >> test_select.py::TestPgSelect::test_as_table |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:09:19.784213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:09:19.911747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:09:19.931473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:09:19.931559Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:09:19.932140Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:09:20.253216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:20.253401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:20.329888Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488556977056 != 1771488556977060 2026-02-19T08:09:20.339902Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:20.408818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:20.524193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:09:20.816382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:20.830914Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:20.936089Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:09:20.936162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:09:20.936303Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:09:21.057308Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:09:21.057441Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:09:21.058183Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:09:21.058293Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:09:21.058727Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:09:21.058884Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:09:21.059011Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:09:21.059301Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:09:21.061249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:21.062488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:09:21.062569Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:09:21.095875Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:09:21.096971Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:09:21.097317Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:09:21.097644Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:09:21.108660Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:09:21.160313Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:09:21.160454Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:09:21.162557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:09:21.162665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:09:21.162745Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:09:21.163140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:09:21.163288Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:09:21.163394Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:09:21.174131Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:09:21.222714Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:09:21.222932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:09:21.223077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:09:21.223119Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:09:21.223172Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:09:21.223213Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:09:21.223495Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:21.223556Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:21.223965Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:09:21.224063Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:09:21.224138Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:09:21.224179Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:21.224240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:09:21.224301Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:21.224336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:21.224386Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:09:21.224455Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:21.224559Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:21.224598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:21.224644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:09:21.224787Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:09:21.224830Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:09:21.224977Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:09:21.225271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:09:21.225324Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:09:21.225428Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... ead_table_scan.cpp:662: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:09:28.484838Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1553:3261], Recipient [2:1284:3048]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2026-02-19T08:09:28.484873Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1284:3048] TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2026-02-19T08:09:28.484900Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1284:3048] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1553:3261] ShardId# 72075186224037896 2026-02-19T08:09:28.485042Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1553:3261], Recipient [2:1284:3048]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2026-02-19T08:09:28.485071Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1284:3048] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2026-02-19T08:09:28.485128Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2026-02-19T08:09:28.485548Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1283:3048], Recipient [2:1284:3048]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2026-02-19T08:09:28.485584Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1284:3048] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:28.485611Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1284:3048] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2026-02-19T08:09:28.485652Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2026-02-19T08:09:28.485735Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2026-02-19T08:09:28.485870Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:1553:3261], Recipient [2:1284:3048]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2026-02-19T08:09:28.485898Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1284:3048] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2026-02-19T08:09:28.485937Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1284:3048] TxId# 281474976715663] Released quota 1 reserved messages 
from ShardId# 72075186224037896 2026-02-19T08:09:28.486012Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037896 2026-02-19T08:09:28.486050Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715664, at: 72075186224037896 2026-02-19T08:09:28.486175Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:1454:3182], Recipient [2:1454:3182]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:28.486206Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:28.486243Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037896 2026-02-19T08:09:28.486283Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:09:28.486316Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2026-02-19T08:09:28.486340Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2026-02-19T08:09:28.486368Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2026-02-19T08:09:28.486403Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2026-02-19T08:09:28.486430Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2026-02-19T08:09:28.486457Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2026-02-19T08:09:28.486492Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2026-02-19T08:09:28.486534Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2026-02-19T08:09:28.486561Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2026-02-19T08:09:28.486586Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2026-02-19T08:09:28.486614Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2026-02-19T08:09:28.486653Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2026-02-19T08:09:28.486731Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2026-02-19T08:09:28.486759Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2026-02-19T08:09:28.486786Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 
2026-02-19T08:09:28.486813Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037896 2026-02-19T08:09:28.486836Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037896 has no attached operations 2026-02-19T08:09:28.486870Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037896 2026-02-19T08:09:28.486921Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037896 2026-02-19T08:09:28.486949Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2026-02-19T08:09:28.486982Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-02-19T08:09:28.487043Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2026-02-19T08:09:28.487289Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1454:3182], Recipient [2:1284:3048]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 323 } } CommitVersion { Step: 0 TxId: 281474976715664 } 2026-02-19T08:09:28.487326Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1284:3048] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2026-02-19T08:09:28.487400Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1284:3048] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.017864s execute time: 0.568629s total time: 0.586493s 2026-02-19T08:09:28.487834Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1284:3048], Recipient [2:1089:2900]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-02-19T08:09:28.487981Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1284:3048], Recipient [2:1197:2982]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-02-19T08:09:28.488344Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1284:3048], Recipient [2:1200:2984]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-02-19T08:09:28.488603Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1284:3048], Recipient [2:1449:3180]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-02-19T08:09:28.488910Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [2:1556:3264], Recipient [2:1342:3098]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:28.488940Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:28.488976Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037893, clientId# [2:1554:3262], serverId# [2:1556:3264], sessionId# [0:0:0] 2026-02-19T08:09:28.489034Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [2:1557:3265], Recipient [2:1345:3100]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:28.489072Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:28.489097Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037894, clientId# [2:1555:3263], serverId# [2:1557:3265], sessionId# [0:0:0] 2026-02-19T08:09:28.489176Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1284:3048], Recipient [2:1454:3182]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-02-19T08:09:28.489581Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1284:3048], Recipient [2:1342:3098]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-02-19T08:09:28.489740Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1284:3048], Recipient [2:1345:3100]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:09:20.977098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:09:21.079196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:09:21.095497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:09:21.095580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:09:21.096223Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:09:21.392845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:21.392988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:21.474472Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488557967666 != 1771488557967670 2026-02-19T08:09:21.484533Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:21.531720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:21.623211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:09:21.926310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:21.939663Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:22.042008Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:09:22.042082Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:09:22.042213Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:09:22.173670Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:09:22.173802Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:09:22.174507Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:09:22.174620Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:09:22.175046Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:09:22.175187Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:09:22.175313Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:09:22.175604Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:09:22.177376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:22.178534Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:09:22.178616Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:09:22.209229Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:09:22.210273Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:09:22.210579Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:09:22.210879Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:09:22.220145Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:09:22.250875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:09:22.251034Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:09:22.252860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:09:22.252961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:09:22.253043Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:09:22.253449Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:09:22.253571Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:09:22.253655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:09:22.264358Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:09:22.314946Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:09:22.315236Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:09:22.315414Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:09:22.315464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:09:22.315529Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:09:22.315584Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:09:22.315852Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:22.315924Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:22.316363Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:09:22.316493Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:09:22.316602Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:09:22.316649Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:22.316719Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:09:22.316770Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:22.316818Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:22.316888Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:09:22.316961Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:22.317081Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:22.317122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:22.317175Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:09:22.317385Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:09:22.317450Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:09:22.317572Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:09:22.317991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:09:22.318059Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:09:22.318180Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... sactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2026-02-19T08:09:28.849038Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1067:2884] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2026-02-19T08:09:28.849067Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1067:2884] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1203:2991] ShardId# 72075186224037890 2026-02-19T08:09:28.849133Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2026-02-19T08:09:28.849203Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1203:2991], Recipient [2:1067:2884]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2026-02-19T08:09:28.849244Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1067:2884] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-02-19T08:09:28.849605Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1066:2884], Recipient [2:1067:2884]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2026-02-19T08:09:28.849650Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1067:2884] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:28.849677Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1067:2884] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2026-02-19T08:09:28.849741Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-02-19T08:09:28.849839Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:09:28.849973Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1203:2991], Recipient [2:1067:2884]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2026-02-19T08:09:28.850022Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1067:2884] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2026-02-19T08:09:28.850054Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1067:2884] TxId# 281474976715661] Sending 
TEvStreamDataAck to [2:1203:2991] ShardId# 72075186224037890 2026-02-19T08:09:28.850158Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1203:2991], Recipient [2:1067:2884]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2026-02-19T08:09:28.850187Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1067:2884] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-02-19T08:09:28.850223Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2026-02-19T08:09:28.850495Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1066:2884], Recipient [2:1067:2884]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2026-02-19T08:09:28.850538Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1067:2884] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:28.850563Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1067:2884] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2026-02-19T08:09:28.850604Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-02-19T08:09:28.850662Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-02-19T08:09:28.850827Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:1203:2991], Recipient [2:1067:2884]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2026-02-19T08:09:28.850859Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1067:2884] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2026-02-19T08:09:28.850886Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1067:2884] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2026-02-19T08:09:28.850947Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037890 2026-02-19T08:09:28.850992Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715662, at: 72075186224037890 2026-02-19T08:09:28.851127Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:1108:2916], Recipient [2:1108:2916]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:28.851171Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:28.851223Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-02-19T08:09:28.851254Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:09:28.851290Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2026-02-19T08:09:28.851319Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit 
ReadTableScan 2026-02-19T08:09:28.851350Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2026-02-19T08:09:28.851386Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2026-02-19T08:09:28.851413Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2026-02-19T08:09:28.851438Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2026-02-19T08:09:28.851463Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2026-02-19T08:09:28.851496Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2026-02-19T08:09:28.851522Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2026-02-19T08:09:28.851546Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2026-02-19T08:09:28.851582Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2026-02-19T08:09:28.851631Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2026-02-19T08:09:28.851653Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2026-02-19T08:09:28.851675Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2026-02-19T08:09:28.851702Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:28.851727Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-02-19T08:09:28.851754Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-02-19T08:09:28.851783Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-02-19T08:09:28.851838Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:09:28.851869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2026-02-19T08:09:28.851904Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-02-19T08:09:28.851969Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:09:28.852215Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1108:2916], Recipient [2:1067:2884]: 
NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 379 } } CommitVersion { Step: 0 TxId: 281474976715662 } 2026-02-19T08:09:28.852256Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1067:2884] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2026-02-19T08:09:28.852320Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1067:2884] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013776s execute time: 0.311879s total time: 0.325655s 2026-02-19T08:09:28.852734Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1067:2884], Recipient [2:892:2770]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-02-19T08:09:28.852930Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1067:2884], Recipient [2:1108:2916]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-02-19T08:09:28.853182Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1067:2884], Recipient [2:1103:2914]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:09:21.373818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:09:21.493485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:09:21.514224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:09:21.514316Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:09:21.515768Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:09:21.816394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:21.816544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:21.904689Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488558418125 != 1771488558418129 2026-02-19T08:09:21.914442Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:21.961307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:22.064543Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:09:22.384159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:22.397428Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:22.498625Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:09:22.498709Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:09:22.498879Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:09:22.656051Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:09:22.656147Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:09:22.656711Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:09:22.656830Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:09:22.657139Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:09:22.657262Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:09:22.657365Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:09:22.657621Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:09:22.659122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:22.660118Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:09:22.660190Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:09:22.691055Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:09:22.692292Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:09:22.692590Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:09:22.693014Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:09:22.703825Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:09:22.742909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:09:22.743058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:09:22.744881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:09:22.744984Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:09:22.745058Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:09:22.745468Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:09:22.745607Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:09:22.745741Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:09:22.756513Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:09:22.789963Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:09:22.790175Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:09:22.790297Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:09:22.790332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:09:22.790390Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:09:22.790436Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:09:22.790676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:22.790756Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:22.791255Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:09:22.791385Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:09:22.791473Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:09:22.791532Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:22.791599Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:09:22.791647Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:22.791686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:22.791731Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:09:22.791807Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:22.791913Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:22.792049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:22.792102Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:09:22.792213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:09:22.792262Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:09:22.792382Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:09:22.792645Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:09:22.792708Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:09:22.792838Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... g event TEvTxProcessing::TEvStreamClearancePending 2026-02-19T08:09:29.095113Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287940, Sender [2:965:2821], Recipient [2:892:2770]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2026-02-19T08:09:29.095145Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3237: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-02-19T08:09:29.095239Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:892:2770], Recipient [2:892:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:29.095268Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:29.095317Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:09:29.095353Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:09:29.095398Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2026-02-19T08:09:29.095433Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2026-02-19T08:09:29.095473Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715659] at 72075186224037888 2026-02-19T08:09:29.095505Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-02-19T08:09:29.095540Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2026-02-19T08:09:29.095570Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2026-02-19T08:09:29.095598Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2026-02-19T08:09:29.095821Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2026-02-19T08:09:29.095855Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:09:29.095883Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-02-19T08:09:29.095912Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:29.095939Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:29.095989Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:29.096435Z node 2 :TX_DATASHARD TRACE: 
datashard_impl.h:3216: StateWork, received event# 2146435082, Sender [2:981:2836], Recipient [2:892:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-02-19T08:09:29.096479Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3246: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-02-19T08:09:29.097069Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:981:2836], Recipient [2:965:2821]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2026-02-19T08:09:29.097112Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:965:2821] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-02-19T08:09:29.097482Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:964:2821], Recipient [2:965:2821]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2026-02-19T08:09:29.097539Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:965:2821] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:29.097579Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:965:2821] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2026-02-19T08:09:29.097669Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2026-02-19T08:09:29.097810Z node 2 :TX_DATASHARD ERROR: read_table_scan.cpp:681: Got scan fatal error: Invalid DyNumber binary representation 2026-02-19T08:09:29.097866Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2026-02-19T08:09:29.098049Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:09:29.098094Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715659, at: 72075186224037888 2026-02-19T08:09:29.098220Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:981:2836], Recipient [2:965:2821]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2026-02-19T08:09:29.098263Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:965:2821] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2026-02-19T08:09:29.098301Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:965:2821] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2026-02-19T08:09:29.098635Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:892:2770], Recipient [2:892:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:29.098695Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:29.098765Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:09:29.098810Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:09:29.098857Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation 
[0:281474976715659] at 72075186224037888 for ReadTableScan 2026-02-19T08:09:29.098895Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2026-02-19T08:09:29.098937Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2026-02-19T08:09:29.098998Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-02-19T08:09:29.099037Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2026-02-19T08:09:29.099078Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:09:29.099110Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2026-02-19T08:09:29.099144Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2026-02-19T08:09:29.099180Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:09:29.099224Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:09:29.099254Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:09:29.099301Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-02-19T08:09:29.099325Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:09:29.099421Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2026-02-19T08:09:29.099474Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:29.099509Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-02-19T08:09:29.099545Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:29.099579Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:29.099642Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:29.099680Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2026-02-19T08:09:29.099721Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2026-02-19T08:09:29.099770Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: 
Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2026-02-19T08:09:29.099849Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:09:29.100145Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:892:2770], Recipient [2:965:2821]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 519 } } CommitVersion { Step: 0 TxId: 281474976715659 } 2026-02-19T08:09:29.100189Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1922: [ReadTable [2:965:2821] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2026-02-19T08:09:29.100249Z node 2 :TX_PROXY ERROR: read_table_impl.cpp:2920: [ReadTable [2:965:2821] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2026-02-19T08:09:29.100557Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:965:2821], Recipient [2:892:2770]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 2500 TxId: 281474976715658 >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:09:22.583125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:09:22.694770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:09:22.717350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:09:22.717434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:09:22.717940Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:09:23.005399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:23.005566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:23.075572Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488559728932 != 1771488559728936 2026-02-19T08:09:23.086054Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:23.129834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:23.260228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:09:23.566809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:23.580259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:23.684473Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:09:23.684557Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:09:23.684684Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:09:23.824362Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:09:23.824470Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:09:23.825121Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:09:23.825229Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:09:23.825578Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:09:23.825697Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:09:23.825894Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:09:23.826167Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:09:23.827814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:23.828856Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:09:23.828913Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:09:23.871078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:09:23.872066Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:09:23.872350Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:09:23.872628Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:09:23.882143Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:09:23.915909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:09:23.916024Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:09:23.917423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:09:23.917528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:09:23.917599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:09:23.917998Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:09:23.918155Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:09:23.918256Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:09:23.929034Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:09:23.969497Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:09:23.969689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:09:23.969806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:09:23.969839Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:09:23.969886Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:09:23.969921Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:09:23.970123Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:23.970185Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:23.970465Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:09:23.970535Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:09:23.970629Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:09:23.970675Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:23.970716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:09:23.970747Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:23.970777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:23.970802Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:09:23.970853Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:23.970995Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:23.971032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:23.971071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:09:23.971169Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:09:23.971216Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:09:23.971325Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:09:23.971553Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:09:23.971603Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:09:23.971703Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:09:30.282040Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435082, Sender [2:1203:2991], Recipient [2:1093:2903]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-02-19T08:09:30.282082Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3246: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-02-19T08:09:30.282169Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1203:2991], Recipient [2:1176:2966]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2026-02-19T08:09:30.282199Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1176:2966] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-02-19T08:09:30.282237Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1176:2966] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 2 rows at [2:1203:2991] 2026-02-19T08:09:30.282347Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2026-02-19T08:09:30.282627Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:09:30.282783Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1203:2991], Recipient [2:1176:2966]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2026-02-19T08:09:30.282817Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1176:2966] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2026-02-19T08:09:30.282847Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1176:2966] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1203:2991] ShardId# 72075186224037890 2026-02-19T08:09:30.282898Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2026-02-19T08:09:30.282969Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1203:2991], Recipient [2:1176:2966]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2026-02-19T08:09:30.282996Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1176:2966] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-02-19T08:09:30.283278Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1175:2966], Recipient [2:1176:2966]: NKikimrTx.TEvStreamQuotaResponse 
TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2026-02-19T08:09:30.283311Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1176:2966] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:30.283339Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1176:2966] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 1 rows at [2:1203:2991] 2026-02-19T08:09:30.283395Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2026-02-19T08:09:30.283460Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:09:30.283587Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1203:2991], Recipient [2:1176:2966]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2026-02-19T08:09:30.283618Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1176:2966] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2026-02-19T08:09:30.283645Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1176:2966] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1203:2991] ShardId# 72075186224037890 2026-02-19T08:09:30.283722Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1176:2966] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.016649s execute time: 0.186501s total time: 0.203150s 2026-02-19T08:09:30.283935Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2026-02-19T08:09:30.283980Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2026-02-19T08:09:30.284355Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1176:2966], Recipient [2:1089:2900]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 2026-02-19T08:09:30.284551Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037890 2026-02-19T08:09:30.284585Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715663, at: 72075186224037890 2026-02-19T08:09:30.284760Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:1093:2903], Recipient [2:1093:2903]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:30.284792Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:30.284836Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-02-19T08:09:30.284870Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 
72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:09:30.284906Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2026-02-19T08:09:30.284936Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2026-02-19T08:09:30.284971Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2026-02-19T08:09:30.285006Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2026-02-19T08:09:30.285038Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2026-02-19T08:09:30.285067Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2026-02-19T08:09:30.285096Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2026-02-19T08:09:30.285132Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2026-02-19T08:09:30.285160Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2026-02-19T08:09:30.285189Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2026-02-19T08:09:30.285219Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2026-02-19T08:09:30.285276Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2026-02-19T08:09:30.285301Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2026-02-19T08:09:30.285326Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2026-02-19T08:09:30.285357Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:30.285384Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-02-19T08:09:30.285412Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-02-19T08:09:30.285446Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-02-19T08:09:30.285493Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:09:30.285524Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2026-02-19T08:09:30.285564Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec 
latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-02-19T08:09:30.285627Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:09:30.285877Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549569, Sender [2:1176:2966], Recipient [2:1093:2903]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2026-02-19T08:09:30.285917Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3258: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2026-02-19T08:09:30.285969Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2026-02-19T08:09:30.286026Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2026-02-19T08:09:30.286190Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287431, Sender [2:1176:2966], Recipient [2:1093:2903]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2026-02-19T08:09:30.286228Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3239: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2026-02-19T08:09:30.286382Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1176:2966], Recipient [2:1093:2903]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:09:23.001098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:09:23.122447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:09:23.142472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:09:23.142549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:09:23.143160Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:09:23.457935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:09:23.458093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:09:23.539459Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488560064829 != 1771488560064833 2026-02-19T08:09:23.549237Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:23.597304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:09:23.692187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:09:24.000358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:09:24.014065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:09:24.117507Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:09:24.117577Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:09:24.117747Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:09:24.286615Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:09:24.286733Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:09:24.287456Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:09:24.287599Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:09:24.287930Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:09:24.288034Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:09:24.288135Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:09:24.288361Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:09:24.290115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:09:24.291247Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:09:24.291317Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:09:24.323058Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:09:24.324090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:09:24.324407Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:09:24.324682Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:09:24.334349Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:09:24.371654Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:09:24.371794Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:09:24.373612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:09:24.374023Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:09:24.374113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:09:24.374494Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:09:24.374656Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:09:24.374759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:09:24.385721Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:09:24.472079Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:09:24.472309Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:09:24.472432Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:09:24.472473Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:09:24.472524Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:09:24.472571Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:09:24.472817Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:24.472879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:24.473302Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:09:24.473407Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:09:24.473486Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:09:24.473524Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:24.473574Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:09:24.473611Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:24.473647Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:24.474004Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:09:24.474098Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:24.474213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:24.474254Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:09:24.474300Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:09:24.474394Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:09:24.474449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:09:24.474574Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:09:24.474828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:09:24.474880Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:09:24.474989Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... on unit ReadTableScan 2026-02-19T08:09:31.122746Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2026-02-19T08:09:31.122888Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2026-02-19T08:09:31.122907Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:09:31.122925Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-02-19T08:09:31.122945Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:31.122963Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:31.122999Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:31.123332Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435082, Sender [2:1080:2896], Recipient [2:892:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-02-19T08:09:31.123358Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3246: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-02-19T08:09:31.123455Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1067:2884] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-02-19T08:09:31.123665Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1067:2884] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:31.123707Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1067:2884] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-02-19T08:09:31.123768Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-02-19T08:09:31.123922Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:09:31.123993Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1067:2884] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-02-19T08:09:31.124060Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-02-19T08:09:31.124098Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1067:2884] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-02-19T08:09:31.124351Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1067:2884] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:31.124372Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1067:2884] TxId# 281474976715661] Reserving 
quota 1 messages for ShardId# 72075186224037888 2026-02-19T08:09:31.124400Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-02-19T08:09:31.124440Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:09:31.124479Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1067:2884] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-02-19T08:09:31.124512Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1067:2884] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-02-19T08:09:31.124534Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-02-19T08:09:31.124654Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1067:2884] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:31.124670Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1067:2884] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-02-19T08:09:31.124692Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-02-19T08:09:31.124723Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:09:31.124749Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1067:2884] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-02-19T08:09:31.124775Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-02-19T08:09:31.124794Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1067:2884] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-02-19T08:09:31.124900Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1067:2884] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-02-19T08:09:31.124916Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1067:2884] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-02-19T08:09:31.124937Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-02-19T08:09:31.124993Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-02-19T08:09:31.125392Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1067:2884] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2026-02-19T08:09:31.125435Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1067:2884] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2026-02-19T08:09:31.125483Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:09:31.125510Z node 2 :TX_DATASHARD DEBUG: 
datashard.cpp:4498: Found op: cookie: 281474976715662, at: 72075186224037888 2026-02-19T08:09:31.125628Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:892:2770], Recipient [2:892:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:31.125671Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:09:31.125783Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:09:31.125842Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:09:31.125890Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2026-02-19T08:09:31.125937Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2026-02-19T08:09:31.125978Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2026-02-19T08:09:31.126027Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2026-02-19T08:09:31.126066Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2026-02-19T08:09:31.126101Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:09:31.126135Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2026-02-19T08:09:31.126172Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 2026-02-19T08:09:31.126202Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:09:31.126240Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:09:31.126274Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:09:31.126315Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2026-02-19T08:09:31.126341Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:09:31.126376Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2026-02-19T08:09:31.126416Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:09:31.126453Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-02-19T08:09:31.126493Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: 
TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:09:31.126524Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:09:31.126587Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:09:31.126624Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2026-02-19T08:09:31.126670Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-02-19T08:09:31.126748Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:09:31.126912Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1067:2884] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2026-02-19T08:09:31.126985Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1067:2884] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.015066s execute time: 0.129619s total time: 0.144685s 2026-02-19T08:09:31.127310Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1067:2884], Recipient [2:892:2770]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> QueryStats::Ranges [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test_select.py::TestPgSelect::test_as_table [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |89.5%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |89.5%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/actors/ut_service/slow/core-kqp-federated_query-actors-ut_service-slow |89.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/actors/ut_service/slow/core-kqp-federated_query-actors-ut_service-slow |89.5%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |89.5%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/actors/ut_service/slow/core-kqp-federated_query-actors-ut_service-slow |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |89.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> test_restarts.py::test_basic |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |89.6%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |89.6%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> TBsLocalRecovery::WriteRestartReadHugeDecreased [GOOD] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::WriteRestartReadHugeDecreased [GOOD] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |89.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] |89.6%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_as_table [GOOD] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/pgwire/pgwire |89.6%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> TCacheTest::WatchRoot >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> TSubscriberCombinationsTest::CombinationsMigratedPath >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] >> YdbSdkSessions::TestSessionPool [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |89.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:248: Test is failing right now >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:200: Test is failing right now >> TCacheTest::RacyRecreateAndSync |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> 
test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/etcd_proxy/etcd_proxy |89.7%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::Attributes >> TModifyUserTest::ModifyUser >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> TSubDomainTest::UserAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2026-02-19T08:09:57.901644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:57.901784Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 
State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
2 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 
FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-02-19T08:09:59.418510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:09:59.437454Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess >> TSubDomainTest::CreateTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:539: Enable after interactive tx support >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::CheckSystemViewAccess |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> TCacheTest::CheckSystemViewAccess [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |89.7%| [LD] {RESULT} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut >> KqpErrors::ProposeResultLost_RwTx+UseSink >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2026-02-19T08:10:00.883377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:00.883455Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ate->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 
281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-02-19T08:10:04.116236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-02-19T08:10:04.124427Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:10:04.127751Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2026-02-19T08:10:04.130983Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:431:2410], for# user1@builtin, access# DescribeSchema 2026-02-19T08:10:04.131631Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:437:2416], for# user1@builtin, access# >> KqpErrors::ProposeError |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |89.7%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> TModifyUserTest::ModifyUser [GOOD] >> TModifyUserTest::ModifyLdapUser >> KqpErrors::ResolveTableError |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true >> 
test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |89.7%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2026-02-19T08:10:01.289016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:01.289134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:01.293154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:01.355585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:10:01.431329Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:01.527157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:10:01.663415Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608485603469180831:2105] Handle TEvNavigate describe path dc-1 2026-02-19T08:10:01.663462Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608485607764148432:2267] HANDLE EvNavigateScheme dc-1 2026-02-19T08:10:01.663572Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608485603469180838:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true 
SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:01.663664Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608485607764148353:2219][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608485603469180838:2108], cookie# 1 2026-02-19T08:10:01.676294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485607764148366:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607764148363:2219], cookie# 1 2026-02-19T08:10:01.676417Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485603469180534:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607764148366:2219], cookie# 1 2026-02-19T08:10:01.676459Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485607764148367:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607764148364:2219], cookie# 1 2026-02-19T08:10:01.676474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485607764148368:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607764148365:2219], cookie# 1 2026-02-19T08:10:01.676518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485607764148366:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485603469180534:2049], cookie# 1 2026-02-19T08:10:01.676558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485607764148353:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485607764148363:2219], cookie# 1 2026-02-19T08:10:01.676583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608485607764148353:2219][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:10:01.676599Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485603469180537:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607764148367:2219], cookie# 1 2026-02-19T08:10:01.676615Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485603469180540:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607764148368:2219], cookie# 1 2026-02-19T08:10:01.676654Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485607764148367:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485603469180537:2052], cookie# 1 2026-02-19T08:10:01.676681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485607764148368:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485603469180540:2055], cookie# 1 2026-02-19T08:10:01.676711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485607764148353:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485607764148364:2219], cookie# 1 2026-02-19T08:10:01.676738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608485607764148353:2219][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 
2026-02-19T08:10:01.676766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485607764148353:2219][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485607764148365:2219], cookie# 1 2026-02-19T08:10:01.676789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608485607764148353:2219][/dc-1] Sync cookie mismatch: sender# [1:7608485607764148365:2219], cookie# 1, current cookie# 0 2026-02-19T08:10:01.676846Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608485603469180838:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:10:01.683146Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608485603469180838:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608485607764148353:2219] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:01.683270Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608485603469180838:2108], cacheItem# { Subscriber: { Subscriber: [1:7608485607764148353:2219] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:10:01.690363Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608485607764148433:2268], recipient# [1:7608485607764148432:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:10:01.690476Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608485607764148432:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:10:01.754216Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608485607764148432:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:10:01.758291Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608485607764148432:2267] Handle TEvDescribeSchemeResult Forward to# [1:7608485607764148431:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# 
Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:10:01.766331Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608485603469180831:2105] Handle TEvProposeTransaction 2026-02-19T08:10:01.766367Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608485603469180831:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:10:01.766425Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608485603469180831:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608485607764148438:2272] 2026-02-19T08:10:01.946140Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:02.066385Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608485607764148438:2272] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-02-19T08:10:02.066451Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608485607764148438:2272] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:10:02.066501Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608485607764148438:2272] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:10:02.066594Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEv ... 
RD_POPULATOR DEBUG: populator.cpp:758: [2:7608485629470195059:2246] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7608485629470195069:2254], cookie# 281474976715662 2026-02-19T08:10:07.500964Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [2:7608485629470195059:2246] Ack for unknown update (already acked?): sender# [2:7608485629470195069:2254], cookie# 281474976715662 2026-02-19T08:10:07.501145Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715662 2026-02-19T08:10:07.501227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715662 2026-02-19T08:10:07.501238Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2026-02-19T08:10:07.501261Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2026-02-19T08:10:07.501277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2026-02-19T08:10:07.501334Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 0 2026-02-19T08:10:07.501698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2026-02-19T08:10:07.515908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2026-02-19T08:10:07.517899Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7608485629470194572:2090] Handle TEvNavigate describe path /dc-1/USER_0 2026-02-19T08:10:07.517935Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7608485633765162517:2363] HANDLE EvNavigateScheme /dc-1/USER_0 2026-02-19T08:10:07.518024Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608485629470194804:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:07.518104Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7608485633765162448:2310][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7608485629470194804:2114], cookie# 10 2026-02-19T08:10:07.518164Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7608485633765162452:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: 
sender# [2:7608485633765162449:2310], cookie# 10 2026-02-19T08:10:07.518192Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7608485633765162453:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7608485633765162450:2310], cookie# 10 2026-02-19T08:10:07.518208Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7608485633765162454:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7608485633765162451:2310], cookie# 10 2026-02-19T08:10:07.518238Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7608485629470194506:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7608485633765162452:2310], cookie# 10 2026-02-19T08:10:07.518266Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7608485629470194509:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7608485633765162453:2310], cookie# 10 2026-02-19T08:10:07.518284Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7608485629470194512:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7608485633765162454:2310], cookie# 10 2026-02-19T08:10:07.518338Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7608485633765162452:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7608485629470194506:2049], cookie# 10 2026-02-19T08:10:07.518355Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7608485633765162453:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7608485629470194509:2052], cookie# 10 2026-02-19T08:10:07.518374Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7608485633765162454:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7608485629470194512:2055], cookie# 10 2026-02-19T08:10:07.518416Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7608485633765162448:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7608485633765162449:2310], cookie# 10 2026-02-19T08:10:07.518461Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7608485633765162448:2310][/dc-1/USER_0] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:10:07.518509Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7608485633765162448:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7608485633765162450:2310], cookie# 10 2026-02-19T08:10:07.518535Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7608485633765162448:2310][/dc-1/USER_0] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:10:07.518571Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7608485633765162448:2310][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7608485633765162451:2310], cookie# 10 2026-02-19T08:10:07.518584Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: 
[main][2:7608485633765162448:2310][/dc-1/USER_0] Sync cookie mismatch: sender# [2:7608485633765162451:2310], cookie# 10, current cookie# 0 2026-02-19T08:10:07.518629Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7608485629470194804:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-02-19T08:10:07.518698Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7608485629470194804:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7608485633765162448:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1771488607427 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:07.518782Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608485629470194804:2114], cacheItem# { Subscriber: { Subscriber: [2:7608485633765162448:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1771488607427 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2026-02-19T08:10:07.518960Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608485633765162518:2364], recipient# [2:7608485633765162517:2363], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:10:07.518995Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7608485633765162517:2363] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:10:07.519055Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7608485633765162517:2363] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2026-02-19T08:10:07.519756Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7608485633765162517:2363] Handle TEvDescribeSchemeResult Forward to# [2:7608485633765162516:2362] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1771488607427 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain >> TTxLocatorTest::TestZeroRange >> TTxLocatorTest::TestZeroRange [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2026-02-19T08:10:15.693310Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-02-19T08:10:15.693704Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-02-19T08:10:15.694368Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-02-19T08:10:15.695650Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:15.696045Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-02-19T08:10:15.704865Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:15.705022Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:15.705062Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:15.705129Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-02-19T08:10:15.705208Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:15.705258Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-02-19T08:10:15.705425Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-02-19T08:10:15.706349Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#0 2026-02-19T08:10:15.706763Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:15.706847Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:15.706943Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2026-02-19T08:10:15.707003Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 0 expected SUCCESS |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> TModifyUserTest::ModifyUserIsEnabled [GOOD] >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |89.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> TTxAllocatorClientTest::Boot |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |89.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes >> TTxAllocatorClientTest::Boot [GOOD] >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2026-02-19T08:10:01.053536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:01.053684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:01.056846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:01.144097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:10:01.243125Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:01.441295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:10:01.566536Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608485602873765720:2105] Handle TEvNavigate describe path dc-1 2026-02-19T08:10:01.566602Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608485607168733323:2267] HANDLE EvNavigateScheme dc-1 2026-02-19T08:10:01.566721Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608485602873765727:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:01.566804Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608485602873765908:2205][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608485602873765727:2108], cookie# 1 2026-02-19T08:10:01.568578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485607168733256:2205][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607168733253:2205], cookie# 1 2026-02-19T08:10:01.568617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485607168733257:2205][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607168733254:2205], cookie# 1 2026-02-19T08:10:01.568636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485607168733258:2205][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607168733255:2205], cookie# 1 2026-02-19T08:10:01.568677Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485602873765423:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607168733256:2205], cookie# 1 2026-02-19T08:10:01.568705Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485602873765426:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607168733257:2205], cookie# 1 2026-02-19T08:10:01.568719Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485602873765429:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485607168733258:2205], cookie# 1 2026-02-19T08:10:01.568790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485607168733256:2205][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485602873765423:2049], cookie# 1 2026-02-19T08:10:01.568821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485607168733257:2205][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485602873765426:2052], cookie# 1 2026-02-19T08:10:01.568863Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485607168733258:2205][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485602873765429:2055], cookie# 1 2026-02-19T08:10:01.568915Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485602873765908:2205][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485607168733253:2205], cookie# 1 2026-02-19T08:10:01.568948Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608485602873765908:2205][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:10:01.568969Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485602873765908:2205][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485607168733254:2205], cookie# 1 2026-02-19T08:10:01.568989Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608485602873765908:2205][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:10:01.569013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485602873765908:2205][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485607168733255:2205], cookie# 1 2026-02-19T08:10:01.569026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608485602873765908:2205][/dc-1] Sync cookie mismatch: sender# 
[1:7608485607168733255:2205], cookie# 1, current cookie# 0 2026-02-19T08:10:01.569087Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608485602873765727:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:10:01.586384Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608485602873765727:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608485602873765908:2205] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:01.586552Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608485602873765727:2108], cacheItem# { Subscriber: { Subscriber: [1:7608485602873765908:2205] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:10:01.589418Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608485607168733324:2268], recipient# [1:7608485607168733323:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:10:01.589514Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608485607168733323:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:10:01.625038Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608485607168733323:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:10:01.629701Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608485607168733323:2267] Handle TEvDescribeSchemeResult Forward to# [1:7608485607168733322:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:10:01.642138Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608485602873765720:2105] Handle TEvProposeTransaction 2026-02-19T08:10:01.642169Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608485602873765720:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:10:01.642235Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608485602873765720:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608485607168733329:2272] 2026-02-19T08:10:01.806025Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:01.835987Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608485607168733329:2272] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-02-19T08:10:01.836074Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608485607168733329:2272] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:10:01.836149Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608485607168733329:2272] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:10:01.836276Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEv ... 
omainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 9 IsSync: true Partial: 0 } 2026-02-19T08:10:14.228712Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608485662780951763:2340], recipient# [3:7608485662780951762:2339], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2026-02-19T08:10:14.228775Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [3:7608485662780951762:2339] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:10:14.228822Z node 3 :TX_PROXY ERROR: schemereq.cpp:1238: Actor# [3:7608485662780951762:2339] txid# 281474976710662, Access denied for user2 on path /dc-1, with access AlterSchema 2026-02-19T08:10:14.228906Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608485662780951762:2339] txid# 281474976710662, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2026-02-19T08:10:14.228934Z node 3 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [3:7608485662780951762:2339] txid# 281474976710662 SEND to# [3:7608485662780951761:2338] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:10:14.230137Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7608485658485984080:2106] Handle TEvProposeTransaction 2026-02-19T08:10:14.230164Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7608485658485984080:2106] TxId# 281474976710663 ProcessProposeTransaction 2026-02-19T08:10:14.230194Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [3:7608485658485984080:2106] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [3:7608485662780951765:2342] 2026-02-19T08:10:14.232267Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [3:7608485662780951765:2342] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user2" Password: "password" CanLogin: false } } } } UserToken: "\n\005user2\022\030\022\026\n\024all-users@well-known\032\322\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMTgxNCwiaWF0IjoxNzcxNDg4NjE0LCJzdWIiOiJ1c2VyMiJ9.RP54HzxGFtnxNQhbUDyQyH21XdMw8cxAitOTX-j-XD_AOy-KYPEDrHbQ_PrCnLFsGObtXnN4FCSZmYebC6xA2hM4Di8nn05iFQlrybLGRLvKQDsyoCpgErhZtOW_tNa58AFat3XMepMNdg2ipADgi2zogsz8sHJlqETVU89lHfx3vZm2jIi27cHoYl2yWGTe7Ki09kOD1AIet3im8GLw2OdM8HsNu4C4cPrhtQZaBVr4XHEinPYdbqpde3CS3qvECzFWShocOthmHdfUx29bl-WQg0SwsHDhBjU-1jBLYH9g9S86CzIl0Q7mvv4oDx2Z-h7ZOX8k3vPWpCQB8otNaA\"\005Login*~eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMTgxNCwiaWF0IjoxNzcxNDg4NjE0LCJzdWIiOiJ1c2VyMiJ9.**0\000" PeerName: "" 2026-02-19T08:10:14.232318Z node 3 
:TX_PROXY DEBUG: schemereq.cpp:650: Actor# [3:7608485662780951765:2342] txid# 281474976710663 Bootstrap, UserSID: user2 CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:10:14.232337Z node 3 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [3:7608485662780951765:2342] txid# 281474976710663 Bootstrap, UserSID: user2 IsClusterAdministrator: 1 2026-02-19T08:10:14.232421Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [3:7608485662780951765:2342] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:10:14.232495Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608485658485984156:2145], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:14.232578Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:7608485658485984323:2233][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:7608485658485984156:2145], cookie# 10 2026-02-19T08:10:14.232646Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7608485658485984333:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7608485658485984330:2233], cookie# 10 2026-02-19T08:10:14.232666Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7608485658485984334:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7608485658485984331:2233], cookie# 10 2026-02-19T08:10:14.232685Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7608485658485984335:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7608485658485984332:2233], cookie# 10 2026-02-19T08:10:14.232729Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7608485658485983783:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7608485658485984333:2233], cookie# 10 2026-02-19T08:10:14.232767Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7608485658485983786:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7608485658485984334:2233], cookie# 10 2026-02-19T08:10:14.232786Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7608485658485983789:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7608485658485984335:2233], cookie# 10 2026-02-19T08:10:14.232824Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7608485658485984333:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7608485658485983783:2049], cookie# 10 2026-02-19T08:10:14.232843Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7608485658485984334:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7608485658485983786:2052], cookie# 10 2026-02-19T08:10:14.232861Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7608485658485984335:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7608485658485983789:2055], cookie# 10 2026-02-19T08:10:14.232896Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:919: [main][3:7608485658485984323:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7608485658485984330:2233], cookie# 10 2026-02-19T08:10:14.232920Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:7608485658485984323:2233][/dc-1] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:10:14.232948Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7608485658485984323:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7608485658485984331:2233], cookie# 10 2026-02-19T08:10:14.232972Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:7608485658485984323:2233][/dc-1] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:10:14.233017Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7608485658485984323:2233][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7608485658485984332:2233], cookie# 10 2026-02-19T08:10:14.233034Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:7608485658485984323:2233][/dc-1] Sync cookie mismatch: sender# [3:7608485658485984332:2233], cookie# 10, current cookie# 0 2026-02-19T08:10:14.233081Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7608485658485984156:2145], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:10:14.233168Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7608485658485984156:2145], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7608485658485984323:2233] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1771488613944 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:14.233264Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608485658485984156:2145], cacheItem# { Subscriber: { Subscriber: [3:7608485658485984323:2233] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1771488613944 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2026-02-19T08:10:14.233509Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608485662780951766:2343], recipient# [3:7608485662780951765:2342], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, 
LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2026-02-19T08:10:14.233551Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [3:7608485662780951765:2342] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:10:14.233598Z node 3 :TX_PROXY ERROR: schemereq.cpp:1238: Actor# [3:7608485662780951765:2342] txid# 281474976710663, Access denied for user2 on path /dc-1, with access AlterSchema 2026-02-19T08:10:14.233679Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608485662780951765:2342] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2026-02-19T08:10:14.233705Z node 3 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [3:7608485662780951765:2342] txid# 281474976710663 SEND to# [3:7608485662780951764:2341] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:10:14.289890Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> KqpErrors::ResolveTableError [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2026-02-19T08:10:20.087198Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-02-19T08:10:20.087766Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-02-19T08:10:20.088572Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-02-19T08:10:20.091835Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:20.092486Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-02-19T08:10:20.105244Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:20.105545Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:20.105626Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:20.105836Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-02-19T08:10:20.105982Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:10:20.106088Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-02-19T08:10:20.106275Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:10:14.976728Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:14.978135Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:15.172279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:10:15.178403Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:10:15.666491Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:15.810875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:15.810998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:15.811481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:15.811549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:15.856633Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:10:15.857131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:15.857530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:15.941637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:10:15.969606Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:10:16.770609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:16.845776Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:16.845925Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:17.231859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:10:19.543415Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:10:19.543565Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:10:19.543986Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-02-19T08:10:19.544109Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:10:19.544453Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710658. Resolved key sets: 1 2026-02-19T08:10:19.544682Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710658. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:10:19.544947Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2026-02-19T08:10:19.545001Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:10:19.545230Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:10:19.545679Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1870:3217] 2026-02-19T08:10:19.546266Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1870:3217], channels: 0 2026-02-19T08:10:19.546387Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:10:19.546460Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:10:19.546509Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1870:3217] 2026-02-19T08:10:19.546564Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710658. 
Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1870:3217], channels: 0 2026-02-19T08:10:19.546665Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1870:3217], trace_id# 2026-02-19T08:10:19.546737Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1870:3217], trace_id# 2026-02-19T08:10:19.546801Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:10:19.564656Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1870:3217] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:10:19.564809Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1870:3217], trace_id# 2026-02-19T08:10:19.564895Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1870:3217], trace_id# 2026-02-19T08:10:19.574451Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1870:3217] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2031 Tasks { TaskId: 1 CpuTimeUs: 1161 FinishTimeMs: 1771488619573 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 52 BuildCpuTimeUs: 1109 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771488619547 UpdateTimeMs: 1771488619573 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:10:19.574560Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1870:3217] 2026-02-19T08:10:19.574661Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:1866:3217] trace_id# 2026-02-19T08:10:19.574750Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002031s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:10:19.611535Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:10:19.611617Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:1867:3217] TxId: 281474976710658. Ctx: { TraceId: 01khtf6a4p48c9k23en47hcefy, Database: , SessionId: ydb://session/3?node_id=1&id=MjFhY2FlNy00MWQyZTA1Yy05ZGY0NGM4OC0yNDA1ZGM2YQ==, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:10:19.661457Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:1885:3235], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2026-02-19T08:10:19.663806Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZjA3ZTNkNjAtN2I1MjBhNTYtNzViZTQ0ZmItNDE2ZmNhM2U=, ActorId: [1:1883:3233], ActorState: ExecuteState, LegacyTraceId: 01khtf6b3028d64d8kgggxrebs, ReplyQueryCompileError, remove tx status# UNAVAILABLE issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[/Root/table-1]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } tx_id# trace_id# >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> ConfiguredTabletBootstrapperTest::BasicInitialization >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TPipeCacheTest::TestIdleRefresh >> ConfiguredTabletBootstrapperTest::BasicInitialization [GOOD] >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges >> TabletState::SeqNoSubscriptionReplace >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode >> TabletState::SeqNoSubscriptionReplace [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CheckAccessCopyTable [GOOD] Test command err: 2026-02-19T08:10:05.846815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:10:05.855468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:05.855545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:05.869178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:05.991345Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:05.996306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485624820520382:2081] 1771488605406539 != 1771488605406542 2026-02-19T08:10:06.084216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:10:06.209403Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608485624820520654:2107] Handle TEvNavigate describe path dc-1 2026-02-19T08:10:06.209455Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608485629115488255:2269] HANDLE EvNavigateScheme dc-1 2026-02-19T08:10:06.209561Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608485624820520662:2110], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ 
Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:06.209662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608485624820520839:2208][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608485624820520662:2110], cookie# 1 2026-02-19T08:10:06.211506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485624820520887:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485624820520884:2208], cookie# 1 2026-02-19T08:10:06.211543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485624820520888:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485624820520885:2208], cookie# 1 2026-02-19T08:10:06.211561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485624820520889:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485624820520886:2208], cookie# 1 2026-02-19T08:10:06.211610Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485624820520350:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485624820520887:2208], cookie# 1 2026-02-19T08:10:06.211665Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485624820520353:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485624820520888:2208], cookie# 1 2026-02-19T08:10:06.211685Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485624820520356:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485624820520889:2208], cookie# 1 2026-02-19T08:10:06.211730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485624820520887:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485624820520350:2049], cookie# 1 2026-02-19T08:10:06.211746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485624820520888:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485624820520353:2052], cookie# 1 2026-02-19T08:10:06.211760Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485624820520889:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485624820520356:2055], cookie# 1 2026-02-19T08:10:06.211803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485624820520839:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485624820520884:2208], cookie# 1 2026-02-19T08:10:06.211828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608485624820520839:2208][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:10:06.211844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485624820520839:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485624820520885:2208], cookie# 1 2026-02-19T08:10:06.211872Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:7608485624820520839:2208][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:10:06.211917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485624820520839:2208][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485624820520886:2208], cookie# 1 2026-02-19T08:10:06.211937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608485624820520839:2208][/dc-1] Sync cookie mismatch: sender# [1:7608485624820520886:2208], cookie# 1, current cookie# 0 2026-02-19T08:10:06.211999Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608485624820520662:2110], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:10:06.226303Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608485624820520662:2110], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608485624820520839:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:06.226430Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608485624820520662:2110], cacheItem# { Subscriber: { Subscriber: [1:7608485624820520839:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:10:06.229099Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608485629115488256:2270], recipient# [1:7608485629115488255:2269], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:10:06.229192Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608485629115488255:2269] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:10:06.282303Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608485629115488255:2269] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:10:06.286007Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608485629115488255:2269] Handle 
TEvDescribeSchemeResult Forward to# [1:7608485629115488254:2268] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:10:06.287820Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608485624820520654:2107] Handle TEvProposeTransaction 2026-02-19T08:10:06.287861Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608485624820520654:2107] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:10:06.287920Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608485624820520654:2107] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608485629115488261:2274] 2026-02-19T08:10:06.426923Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608485629115488261:2274] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-02-19T08:10:06.426996Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608485629115488261:2274] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:10:06.427060Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608485629115488261:2274] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:10:06.427736Z node 1 :TX_PROXY_SCHEME_CAC ... 
ersion: 0 }: sender# [5:7608485693510696210:2371] 2026-02-19T08:10:22.167048Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][5:7608485693510696205:2371][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [5:7608485672035859280:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.167065Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][5:7608485693510696205:2371][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7608485693510696211:2371] 2026-02-19T08:10:22.167086Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][5:7608485693510696205:2371][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [5:7608485672035859280:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.167174Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][5:7608485693510696204:2370][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [5:7608485693510696206:2370] 2026-02-19T08:10:22.167199Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][5:7608485693510696204:2370][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [5:7608485672035859280:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.167218Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][5:7608485693510696204:2370][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [5:7608485693510696207:2370] 2026-02-19T08:10:22.167242Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][5:7608485693510696204:2370][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [5:7608485672035859280:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.167258Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][5:7608485693510696204:2370][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [5:7608485693510696208:2370] 2026-02-19T08:10:22.167282Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][5:7608485693510696204:2370][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [5:7608485672035859280:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
2026-02-19T08:10:22.377819Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7608485675281178257:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:22.377955Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7608485675281178257:2109], cacheItem# { Subscriber: { Subscriber: [4:7608485692461047667:2221] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:10:22.378009Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7608485675281178257:2109], cacheItem# { Subscriber: { Subscriber: [4:7608485692461047668:2222] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:10:22.378131Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7608485696756015010:2226], recipient# [4:7608485692461047664:2310], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:22.378637Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7608485692461047664:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:10:22.481296Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608485692461047668:2222][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7608485692461047670:2222] 2026-02-19T08:10:22.481380Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608485692461047668:2222][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7608485675281178257:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.481418Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608485692461047668:2222][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7608485692461047671:2222] 2026-02-19T08:10:22.481438Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608485692461047668:2222][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7608485675281178257:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.481456Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608485692461047668:2222][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7608485692461047674:2222] 2026-02-19T08:10:22.481490Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608485692461047668:2222][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7608485675281178257:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.481570Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608485692461047667:2221][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7608485692461047669:2221] 2026-02-19T08:10:22.481594Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608485692461047667:2221][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7608485675281178257:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.481611Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608485692461047667:2221][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7608485692461047672:2221] 2026-02-19T08:10:22.481630Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: 
subscriber.cpp:867: [main][4:7608485692461047667:2221][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7608485675281178257:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:22.481645Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608485692461047667:2221][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7608485692461047673:2221] 2026-02-19T08:10:22.481664Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608485692461047667:2221][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7608485675281178257:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> TPipeCacheTest::TestTabletNode [GOOD] >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> TTabletResolver::TabletResolvePriority [GOOD] >> TTabletResolver::NodeProblem |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscriptionReplace [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges [GOOD] >> TTabletResolver::NodeProblem [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2026-02-19T08:10:26.132094Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 123 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.132334Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 123 leader: [1:217:2139] followers: 0 2026-02-19T08:10:26.132464Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 1 localDc: 1 other: 0 disallowed: 0 tabletId: 123 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [1:217:2139] 2026-02-19T08:10:26.132910Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle 
TEvForward tabletId: 234 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.133125Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 234 leader: [1:223:2143] followers: 0 2026-02-19T08:10:26.133174Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 1 localDc: 1 other: 0 disallowed: 0 tabletId: 234 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [1:223:2143] 2026-02-19T08:10:26.135486Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:217:2139] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.135548Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 1 localDc: 1 other: 0 disallowed: 0 tabletId: 123 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [1:217:2139] 2026-02-19T08:10:26.135736Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:223:2143] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.135785Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 1 localDc: 1 other: 0 disallowed: 0 tabletId: 234 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [1:223:2143] 2026-02-19T08:10:26.135967Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:986: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 5 2026-02-19T08:10:26.136014Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:889: Delayed invalidation of tabletId: 123 leader: [1:217:2139] by nodeId 2026-02-19T08:10:26.136057Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:217:2139] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.136136Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 123 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:10:26.136408Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 123 leader: [2:233:2096] followers: 0 2026-02-19T08:10:26.136489Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 2 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 123 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [2:233:2096] 2026-02-19T08:10:26.136921Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:889: Delayed invalidation of tabletId: 234 leader: [1:223:2143] by nodeId 2026-02-19T08:10:26.136975Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:223:2143] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.137016Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 234 followers: 0 allowLeader: 0 
allowFollowers: 0 2026-02-19T08:10:26.137234Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 234 leader: [2:239:2098] followers: 0 2026-02-19T08:10:26.137324Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 2 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 234 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [2:239:2098] 2026-02-19T08:10:26.139238Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:986: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2026-02-19T08:10:26.139337Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:233:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.139435Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 2 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 123 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [2:233:2096] 2026-02-19T08:10:26.139669Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:239:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.139725Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 2 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 234 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [2:239:2098] 2026-02-19T08:10:26.139911Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:986: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 7 2026-02-19T08:10:26.139954Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:889: Delayed invalidation of tabletId: 123 leader: [2:233:2096] by nodeId 2026-02-19T08:10:26.140066Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:233:2096] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.140128Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: 1 leaderDC: 2 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 123 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:10:26.140387Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 123 leader: [3:251:2096] followers: 0 2026-02-19T08:10:26.140442Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 3 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 123 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [3:251:2096] 2026-02-19T08:10:26.140879Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:239:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.140948Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 2 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 234 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [2:239:2098] 2026-02-19T08:10:26.141172Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:986: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 8 
2026-02-19T08:10:26.141249Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [3:251:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.141303Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 3 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 123 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [3:251:2096] 2026-02-19T08:10:26.141577Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:889: Delayed invalidation of tabletId: 234 leader: [2:239:2098] by nodeId 2026-02-19T08:10:26.141637Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:239:2098] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:10:26.141675Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: 1 leaderDC: 2 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 234 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:10:26.142165Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 234 leader: [3:257:2098] followers: 0 2026-02-19T08:10:26.142253Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 3 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 234 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [3:257:2098] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges [GOOD] Test command err: 2026-02-19T08:10:23.650330Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-02-19T08:10:23.650500Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:144: Started tablet 72075186232723360 bootstrapper on node 1 2026-02-19T08:10:23.650548Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 1 2026-02-19T08:10:23.655701Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-02-19T08:10:23.656670Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-02-19T08:10:23.656727Z node 1 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-02-19T08:10:24.779731Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-02-19T08:10:24.779814Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-02-19T08:10:24.779880Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:144: Started tablet 72075186232723361 bootstrapper on node 4 2026-02-19T08:10:24.779927Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-02-19T08:10:24.779964Z node 6 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 6 
2026-02-19T08:10:24.780003Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 7 2026-02-19T08:10:24.780241Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-02-19T08:10:24.780267Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-02-19T08:10:24.989588Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:83: Received bootstrap config update on node 4 2026-02-19T08:10:24.990804Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-02-19T08:10:24.990874Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:71: Stopping tablet 72075186232723361 bootstrapper on node 4 2026-02-19T08:10:24.991002Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:144: Started tablet 72075186232723361 bootstrapper on node 4 2026-02-19T08:10:24.991709Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-02-19T08:10:24.992376Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:83: Received bootstrap config update on node 5 2026-02-19T08:10:24.992427Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-02-19T08:10:24.992543Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:144: Started tablet 72075186232723361 bootstrapper on node 5 2026-02-19T08:10:24.992904Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-02-19T08:10:24.993320Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:282:2143] 2026-02-19T08:10:24.993749Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-02-19T08:10:24.993793Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:72075186232723361, type: Dummy, begin new round, seed: 11436890671405667697 2026-02-19T08:10:24.994120Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:282:2143] 2026-02-19T08:10:24.995066Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 72075186232723361, type: Dummy, apply alien 5 state: UNKNOWN 2026-02-19T08:10:24.995119Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-02-19T08:10:24.995673Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-02-19T08:10:24.995709Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:72075186232723361, type: Dummy, begin new round, seed: 5380068269072606170 2026-02-19T08:10:24.996233Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 72075186232723361, type: Dummy, apply alien 4 state: OWNER 2026-02-19T08:10:24.996268Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 72075186232723361, type: Dummy, become watch on node 4 (owner) 2026-02-19T08:10:25.141990Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:83: Received bootstrap config update on node 4 2026-02-19T08:10:25.142127Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config 
changed, recreating bootstrapper on node 4 2026-02-19T08:10:25.142172Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:71: Stopping tablet 72075186232723361 bootstrapper on node 4 2026-02-19T08:10:25.143532Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:83: Received bootstrap config update on node 5 2026-02-19T08:10:25.143608Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-02-19T08:10:25.143649Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:71: Stopping tablet 72075186232723361 bootstrapper on node 5 2026-02-19T08:10:25.143749Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:144: Started tablet 72075186232723361 bootstrapper on node 5 2026-02-19T08:10:25.143853Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-02-19T08:10:25.143941Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-02-19T08:10:25.144992Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:336:2166] 2026-02-19T08:10:25.145360Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-02-19T08:10:25.145415Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-02-19T08:10:25.450829Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:83: Received bootstrap config update on node 5 2026-02-19T08:10:25.450951Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-02-19T08:10:25.451008Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:71: Stopping tablet 72075186232723361 bootstrapper on node 5 2026-02-19T08:10:25.452668Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:83: Received bootstrap config update on node 7 2026-02-19T08:10:25.452728Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:99: Tablet 72075186232723361 config changed, recreating bootstrapper on node 7 2026-02-19T08:10:25.452827Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:144: Started tablet 72075186232723361 bootstrapper on node 7 2026-02-19T08:10:25.452956Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-02-19T08:10:25.453477Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [5:380:2102] 2026-02-19T08:10:25.454230Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-02-19T08:10:25.454269Z node 7 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot >> TTabletPipeTest::TestConsumerSidePipeReset >> BootstrapperTest::RestartUnavailableTablet >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |89.8%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestConnectReject |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |89.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] >> TTabletPipeTest::TestConnectReject [GOOD] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2026-02-19T08:09:52.547734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:52.547816Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 
FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 
FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-02-19T08:09:54.102657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConnectReject [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] Test command err: 2026-02-19T08:10:04.850977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:04.805645Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:05.638335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:05.793898Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:05.874147Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:05.944623Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:10:05.981367Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:10:05.981512Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:10:06.159293Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:10:06.192713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:06.192797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:06.230198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:06.230283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:06.239408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:06.239481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:06.310764Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:10:06.310804Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:10:06.310961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:06.314993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:06.322477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:06.523773Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:06.534412Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:10:06.562591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:10:06.563236Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:10:07.711870Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608485621502685052:2146] Handle TEvNavigate describe path dc-1 2026-02-19T08:10:07.711921Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608485634387587433:2468] HANDLE EvNavigateScheme dc-1 2026-02-19T08:10:07.712032Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608485621502685083:2160], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: 
OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:07.712130Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608485625797652595:2304][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608485621502685083:2160], cookie# 1 2026-02-19T08:10:07.713889Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485625797652652:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485625797652649:2304], cookie# 1 2026-02-19T08:10:07.713929Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485625797652653:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485625797652650:2304], cookie# 1 2026-02-19T08:10:07.713950Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485625797652654:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485625797652651:2304], cookie# 1 2026-02-19T08:10:07.713991Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485617207717392:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485625797652652:2304], cookie# 1 2026-02-19T08:10:07.714022Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485617207717395:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485625797652653:2304], cookie# 1 2026-02-19T08:10:07.714057Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485617207717398:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485625797652654:2304], cookie# 1 2026-02-19T08:10:07.714109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485625797652652:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485617207717392:2053], cookie# 1 2026-02-19T08:10:07.714127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485625797652653:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485617207717395:2056], cookie# 1 2026-02-19T08:10:07.714141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485625797652654:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485617207717398:2059], cookie# 1 2026-02-19T08:10:07.714195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485625797652595:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485625797652649:2304], cookie# 1 2026-02-19T08:10:07.714225Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608485625797652595:2304][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:10:07.714243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485625797652595:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485625797652650:2304], cookie# 1 2026-02-19T08:10:07.714279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608485625797652595:2304][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, 
half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:10:07.714320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485625797652595:2304][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485625797652651:2304], cookie# 1 2026-02-19T08:10:07.714335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608485625797652595:2304][/dc-1] Sync cookie mismatch: sender# [1:7608485625797652651:2304], cookie# 1, current cookie# 0 2026-02-19T08:10:07.714388Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608485621502685083:2160], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:10:07.720437Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608485621502685083:2160], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608485625797652595:2304] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:07.720559Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608485621502685083:2160], cacheItem# { Subscriber: { Subscriber: [1:7608485625797652595:2304] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:10:07.739485Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608485634387587434:2469], recipient# [1:7608485634387587433:2468], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:10:07.739601Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608485634387587433:2468] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:10:07.810805Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608485634387587433:2468] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPr ... 
2057594046644480 }: sender# [7:7608485710236314944:2841] 2026-02-19T08:10:25.610933Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [7:7608485693056444465:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2026-02-19T08:10:25.610954Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [7:7608485693056444465:2056] Subscribe: subscriber# [7:7608485710236314944:2841], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-02-19T08:10:25.610979Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [7:7608485693056444465:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [7:7608485710236314945:2842] 2026-02-19T08:10:25.610986Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [7:7608485693056444465:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-02-19T08:10:25.611003Z node 7 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [7:7608485693056444465:2056] Subscribe: subscriber# [7:7608485710236314945:2842], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-02-19T08:10:25.611042Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7608485710236314940:2841][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7608485693056444459:2050] 2026-02-19T08:10:25.611063Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7608485710236314942:2841][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7608485693056444462:2053] 2026-02-19T08:10:25.611080Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7608485710236314944:2841][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7608485693056444465:2056] 2026-02-19T08:10:25.611123Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7608485710236314932:2841][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7608485710236314934:2841] 2026-02-19T08:10:25.611170Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7608485710236314932:2841][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7608485710236314935:2841] 2026-02-19T08:10:25.611200Z node 7 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][7:7608485710236314932:2841][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [7:7608485693056444792:2146], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:25.611221Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7608485710236314932:2841][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [7:7608485710236314936:2841] 2026-02-19T08:10:25.611244Z 
node 7 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][7:7608485710236314932:2841][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [7:7608485693056444792:2146], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:25.611264Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7608485710236314941:2842][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [7:7608485693056444459:2050] 2026-02-19T08:10:25.611282Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7608485710236314943:2842][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [7:7608485693056444462:2053] 2026-02-19T08:10:25.611325Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][7:7608485710236314945:2842][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [7:7608485693056444465:2056] 2026-02-19T08:10:25.611357Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7608485710236314933:2842][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [7:7608485710236314937:2842] 2026-02-19T08:10:25.611387Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7608485710236314933:2842][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [7:7608485710236314938:2842] 2026-02-19T08:10:25.611406Z node 7 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][7:7608485710236314933:2842][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [7:7608485693056444792:2146], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:25.611422Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7608485710236314933:2842][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [7:7608485710236314939:2842] 2026-02-19T08:10:25.611441Z node 7 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][7:7608485710236314933:2842][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [7:7608485693056444792:2146], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:25.611466Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7608485693056444459:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7608485710236314940:2841] 2026-02-19T08:10:25.611481Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7608485693056444459:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7608485710236314941:2842] 2026-02-19T08:10:25.611492Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: 
[7:7608485693056444462:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7608485710236314942:2841] 2026-02-19T08:10:25.611504Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7608485693056444462:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7608485710236314943:2842] 2026-02-19T08:10:25.611515Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7608485693056444465:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7608485710236314944:2841] 2026-02-19T08:10:25.611527Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7608485693056444465:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7608485710236314945:2842] 2026-02-19T08:10:25.611598Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [7:7608485693056444792:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-02-19T08:10:25.611709Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [7:7608485693056444792:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7608485710236314932:2841] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:25.611839Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7608485693056444792:2146], cacheItem# { Subscriber: { Subscriber: [7:7608485710236314932:2841] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:10:25.611874Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [7:7608485693056444792:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-02-19T08:10:25.611929Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [7:7608485693056444792:2146], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7608485710236314933:2842] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:25.611993Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7608485693056444792:2146], cacheItem# { Subscriber: { Subscriber: [7:7608485710236314933:2842] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:10:25.612083Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7608485710236314946:2843], recipient# [7:7608485710236314929:2306], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> BootstrapperTest::UnavailableStateStorage [GOOD] >> TSchemeShardSysNames::CreateOpsAreCovered [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2026-02-19T08:10:30.034894Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 2026-02-19T08:10:30.035528Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2026-02-19T08:10:30.035575Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:260: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.101463s 2026-02-19T08:10:30.191513Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... 
blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 ... waiting for multiple state storage lookup attempts (done) >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:10:13.770066Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:13.771076Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:14.025628Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:689:2350], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:10:14.196139Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:10:14.202874Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:10:14.203500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:10:14.203561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:10:14.205328Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:10:14.205420Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:10:14.788177Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:14.928810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:14.928957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:14.929384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:14.929491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:14.981896Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:10:14.982887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:14.983287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:15.084588Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:10:15.148946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:10:15.815575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:15.874248Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:15.874352Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:16.239392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:10:17.403896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1924:3235], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:17.404036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1935:3240], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:17.404477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:17.405272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1940:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:17.405521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:17.414384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:10:18.073350Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1938:3243], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:10:18.323891Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2049:3321] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:10:18.866671Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:10:18.866836Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtf68xs2mbjaxejrjn6x86x, Database: , SessionId: ydb://session/3?node_id=1&id=YzBmMGE2MTUtYjA1MmVjYzUtY2U4ZDhhZDQtNDYzN2UwODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:10:18.867193Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:2075:3233] TxId: 281474976710660. Ctx: { TraceId: 01khtf68xs2mbjaxejrjn6x86x, Database: , SessionId: ydb://session/3?node_id=1&id=YzBmMGE2MTUtYjA1MmVjYzUtY2U4ZDhhZDQtNDYzN2UwODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-02-19T08:10:18.867281Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:2075:3233] TxId: 281474976710660. Ctx: { TraceId: 01khtf68xs2mbjaxejrjn6x86x, Database: , SessionId: ydb://session/3?node_id=1&id=YzBmMGE2MTUtYjA1MmVjYzUtY2U4ZDhhZDQtNDYzN2UwODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:10:18.867544Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710660. Resolved key sets: 1 2026-02-19T08:10:18.867743Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:10:18.867987Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2026-02-19T08:10:18.868069Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:10:18.868277Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710660. Ctx: { TraceId: 01khtf68xs2mbjaxejrjn6x86x, Database: , SessionId: ydb://session/3?node_id=1&id=YzBmMGE2MTUtYjA1MmVjYzUtY2U4ZDhhZDQtNDYzN2UwODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:10:18.868674Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710660. 
Ctx: { TraceId: 01khtf68xs2mbjaxejrjn6x86x, Database: , SessionId: ydb://session/3?node_id=1&id=YzBmMGE2MTUtYjA1MmVjYzUtY2U4ZDhhZDQtNDYzN2UwODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:2078:3233] 2026-02-19T08:10:18.868747Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710660. Ctx: { TraceId: 01khtf68xs2mbjaxejrjn6x86x, Database: , SessionId: ydb://session/3?node_id=1&id=YzBmMGE2MTUtYjA1MmVjYzUtY2U4ZDhhZDQtNDYzN2UwODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:2078:3233], channels: 0 2026-02-19T08:10:18.868836Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:2075:3233] TxId: 281474976710660. Ctx: { TraceId: 01khtf68xs2mbjaxejrjn6x86x, Database: , SessionId: ydb://session/3?node_id=1&id=YzBmMGE2MTUtYjA1MmVjYzUtY2U4ZDhhZDQtNDYzN2UwODQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:10:18.868883Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:2075:3233] TxId: 281474976710660. Ctx: { TraceId: 01khtf68xs2mbjaxejrjn6x86x, Database: , SessionId: ydb://session/3?node_id=1&id=YzBmMGE2MTUtYjA1MmVjYzUtY2U4ZDhhZDQtNDYzN2UwODQ=, Poo ... [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CT 1, CA [3:2130:3365], trace_id# 2026-02-19T08:10:32.672271Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2130:3365], trace_id# 2026-02-19T08:10:32.673136Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1046} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing task TaskId# 1 ComputeActor# [4:2131:2478] trace_id# 2026-02-19T08:10:32.673216Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [4:2131:2478] 2026-02-19T08:10:32.673288Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:908: TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task: 1, output channelId: 1, dst task: 2, at actor [3:2130:3365] 2026-02-19T08:10:32.673351Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715663. 
Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2130:3365], channels: 1 2026-02-19T08:10:32.673438Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:2131:2478], channels: 1 2026-02-19T08:10:32.673839Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2131:2478] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:10:32.673902Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2130:3365], CA [4:2131:2478], trace_id# 2026-02-19T08:10:32.673962Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [3:2130:3365], CA [4:2131:2478], trace_id# 2026-02-19T08:10:32.675059Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2131:2478] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 1256 Tasks { TaskId: 1 CpuTimeUs: 777 ComputeCpuTimeUs: 17 BuildCpuTimeUs: 760 HostName: "ghrun-fkrokuvqke" NodeId: 4 CreateTimeMs: 1771488632670 CurrentWaitInputTimeUs: 25 UpdateTimeMs: 1771488632671 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:10:32.675125Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2130:3365], CA [4:2131:2478], trace_id# 2026-02-19T08:10:32.675180Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [3:2130:3365], CA [4:2131:2478], trace_id# 2026-02-19T08:10:32.694900Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2131:2478] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 2040 Tasks { TaskId: 1 CpuTimeUs: 1047 OutputRows: 3 OutputBytes: 13 IngressRows: 3 ComputeCpuTimeUs: 287 BuildCpuTimeUs: 760 WaitInputTimeUs: 12511 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771488632681 CreateTimeMs: 1771488632670 CurrentWaitInputTimeUs: 12608 UpdateTimeMs: 1771488632694 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:10:32.695001Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2130:3365], CA [4:2131:2478], trace_id# 2026-02-19T08:10:32.695059Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [3:2130:3365], CA [4:2131:2478], trace_id# 2026-02-19T08:10:32.699098Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2130:3365] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1452 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 1038 FinishTimeMs: 1771488632698 InputRows: 3 InputBytes: 14 OutputRows: 3 OutputBytes: 14 ResultRows: 3 ResultBytes: 14 ComputeCpuTimeUs: 314 BuildCpuTimeUs: 724 HostName: "ghrun-fkrokuvqke" NodeId: 3 CreateTimeMs: 1771488632668 UpdateTimeMs: 1771488632698 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:10:32.699198Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2130:3365] 2026-02-19T08:10:32.699282Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2131:2478], trace_id# 2026-02-19T08:10:32.699331Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:2122:3365] TxId: 281474976715663. 
Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:2131:2478], trace_id# 2026-02-19T08:10:32.699891Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2131:2478] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2455 DurationUs: 17000 Tasks { TaskId: 1 CpuTimeUs: 1134 FinishTimeMs: 1771488632698 OutputRows: 3 OutputBytes: 14 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ComputeCpuTimeUs: 374 BuildCpuTimeUs: 760 WaitInputTimeUs: 15273 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771488632681 CreateTimeMs: 1771488632670 UpdateTimeMs: 1771488632698 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:10:32.699991Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:2131:2478] 2026-02-19T08:10:32.700160Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:10:32.700255Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:10:32.700316Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [3:2122:3365] TxId: 281474976715663. Ctx: { TraceId: 01khtf6qmmcs7gkmt2r9rsap08, Database: , SessionId: ydb://session/3?node_id=3&id=MTM3OWY0M2ItNDVlNTEzYzUtZDYxZWUzYTAtNTljMzNiNDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.003907s ReadRows: 3 ReadBytes: 24 RequestUnits# 3 ForceFlag# true trace_id# { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } >> TSchemeShardServerLess::StorageBillingLabels [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:09:11.663292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:09:11.663430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:11.663553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:09:11.663597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:09:11.663630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:09:11.663663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:09:11.663689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:09:11.663713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:09:11.663780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:11.663848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:09:11.664704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:09:11.665026Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:09:11.750389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:11.750445Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:11.768418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:09:11.769272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:09:11.769508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:09:11.781348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:09:11.781994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:09:11.782680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:11.783012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:09:11.786273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:11.786441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:09:11.787552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:09:11.787615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:11.787720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:09:11.787757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:09:11.787836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:09:11.788016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:09:11.968823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.970951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.971008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:11.971102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
.cpp:615: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-02-19T08:10:03.765415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-02-19T08:10:08.193910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:08.194059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:129: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 2026-02-19T08:10:08.194139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:08.282234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:10:08.282344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:10:08.282400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:10:08.425986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2026-02-19T08:10:08.426107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2026-02-19T08:10:08.426178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-02-19T08:10:08.478074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-02-19T08:10:08.478189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-02-19T08:10:08.478256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-02-19T08:10:08.530404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2026-02-19T08:10:08.580460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-02-19T08:10:08.580661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-02-19T08:10:08.580723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, 
RowCount 1, DataSize 41 2026-02-19T08:10:08.580839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-02-19T08:10:08.593980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-02-19T08:10:13.018563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2026-02-19T08:10:13.070342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-02-19T08:10:13.070570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-02-19T08:10:13.070669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-02-19T08:10:13.070797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-02-19T08:10:13.081314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-02-19T08:10:17.749926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2026-02-19T08:10:17.797007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-02-19T08:10:17.797221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-02-19T08:10:17.797309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-02-19T08:10:17.797445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-02-19T08:10:17.809617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-02-19T08:10:22.414545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0017 2026-02-19T08:10:22.461647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-02-19T08:10:22.461880Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-02-19T08:10:22.461940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-02-19T08:10:22.462037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-02-19T08:10:22.476802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-02-19T08:10:27.014208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0017 2026-02-19T08:10:27.058411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-02-19T08:10:27.058607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-02-19T08:10:27.058664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-02-19T08:10:27.058766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-02-19T08:10:27.069217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-02-19T08:10:31.512643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2026-02-19T08:10:31.557443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-02-19T08:10:31.557657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-02-19T08:10:31.557741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-02-19T08:10:31.557867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-02-19T08:10:31.568338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 
2026-02-19T08:10:35.590272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:35.590615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":41},"id":"72057594046678944-39-120-179-41","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"Category":"Table","k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2026-02-19T08:10:35.598204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering cookie 0 ... waiting for metering (done) >> KqpErrors::ProposeErrorEvWrite [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:10:15.565606Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:15.566753Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:15.567630Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:10:15.692648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:10:15.693908Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:10:15.699011Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:10:15.699297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:10:15.699546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:10:15.701142Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:10:15.701305Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:10:16.187611Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:16.349052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:16.349245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:16.350184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:16.350252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:16.395656Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:10:16.396305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:16.396749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:16.492014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:10:16.548319Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:10:17.482368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:17.556128Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:17.556274Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:17.945532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:10:19.110482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1927:3238], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:19.110723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1938:3243], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:19.110939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:19.112409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1943:3248], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:19.112745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:10:19.129288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:10:19.853898Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1941:3246], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:10:20.104581Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2052:3324] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:10:20.478847Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:10:20.478972Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-02-19T08:10:20.479022Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:10:20.479070Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) trace_id# 2026-02-19T08:10:20.479133Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:10:20.482075Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-02-19T08:10:20.491941Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:10:20.492076Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtf6ajsaw416amg9za1tbg3, Database: , SessionId: ydb://session/3?node_id=1&id=ZDM2YmJiOWUtMTczYzQ0OTItNWZjZjU0NWItZDk3NzM4Njg=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution operation_timeout# 299.418189s cancel_after# trace_id# 2026-02-19T08:10:20.492146Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtf6ajsaw416amg9za1tbg3, Database: , SessionId: ydb://session/3?node_id=1&id=ZDM2YmJiOWUtMTczYzQ0OTItNWZjZjU0NWItZDk3NzM4Njg=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:10:20.492240Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtf6ajsaw416amg9za1tbg3, Database: , SessionId: ydb://session/3?node_id=1&id=ZDM2YmJiOWUtMTczYzQ0OTItNWZjZjU0NWItZDk3NzM4Njg=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) trace_id# 2026-02-19T08:10:20.492304Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:10:20.492921Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtf6ajsaw416amg9za1tbg3, Database: , SessionId: ydb://session/3?node_id=1&id=ZDM2YmJiOWUtMTczYzQ0OTItNWZjZjU0NWItZDk3NzM4Njg=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-02-19T08:10:20.493159Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:10:20.493293Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtf6ajsaw416amg9za1tbg3, Database: , SessionId: ydb://session/3?node_id=1&id=ZDM2YmJiOWUtMTczYzQ0OTItNWZjZjU0NWItZDk3NzM4Njg=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:10:20.493637Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:2078:3236] TxId: 281474976710660. Ctx: { TraceId: 01khtf6ajsaw416amg9za1tbg3, Database: , SessionI ... 2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:10:36.655922Z node 3 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:10:36.655980Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [3:2335:3468] 2026-02-19T08:10:36.656036Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2335:3468], channels: 0 2026-02-19T08:10:36.656107Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2335:3468], trace_id# 2026-02-19T08:10:36.656178Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:2332:3468] TxId: 281474976715683. 
Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2335:3468], trace_id# 2026-02-19T08:10:36.656237Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:10:36.657285Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2335:3468] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:10:36.657368Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2335:3468], trace_id# 2026-02-19T08:10:36.657457Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2335:3468], trace_id# 2026-02-19T08:10:36.658911Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2335:3468] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 725 Tasks { TaskId: 1 CpuTimeUs: 145 FinishTimeMs: 1771488636658 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 26 BuildCpuTimeUs: 119 HostName: "ghrun-fkrokuvqke" NodeId: 3 CreateTimeMs: 1771488636656 UpdateTimeMs: 1771488636658 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:10:36.658984Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2335:3468] 2026-02-19T08:10:36.659067Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Send Commit to BufferActor buffer_actor_id# [3:2331:3468] trace_id# 2026-02-19T08:10:36.659141Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000725s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:10:36.678002Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:889: SelfId: [3:2337:3468], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:2322:3468]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2337:3468]. Ignored this error. 2026-02-19T08:10:36.678174Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:2331:3468], SessionActorId: [3:2322:3468], statusCode=UNAVAILABLE. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 . sessionActorId=[3:2322:3468]. 2026-02-19T08:10:36.678630Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, ActorId: [3:2322:3468], ActorState: ExecuteState, LegacyTraceId: 01khtf6vn5d037e889q6vpcjwx, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [3:2332:3468] from: [3:2331:3468] trace_id# 2026-02-19T08:10:36.678837Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got EvAbortExecution Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 } trace_id# 2026-02-19T08:10:36.678944Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 } trace_id# 2026-02-19T08:10:36.679028Z node 3 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1221} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-02-19T08:10:36.679221Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: UNAVAILABLE Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2036 severity: 1 } Result { Stats { } } TargetActor# [3:2322:3468] trace_id# 2026-02-19T08:10:36.679285Z node 3 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2825} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shutdown immediately - nothing to wait trace_id# 2026-02-19T08:10:36.679407Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:10:36.679472Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [3:2332:3468] TxId: 281474976715683. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:10:36.679830Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, ActorId: [3:2322:3468], ActorState: ExecuteState, LegacyTraceId: 01khtf6vn5d037e889q6vpcjwx, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2036 severity: 1 } trace_id# 2026-02-19T08:10:36.680123Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:10:36.681165Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [3:2340:3468] TxId: 281474976715684. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [3:2331:3468] trace_id# 2026-02-19T08:10:36.681603Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [3:2340:3468] TxId: 281474976715684. 
Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:10:36.681691Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [3:2340:3468] TxId: 281474976715684. Ctx: { TraceId: 01khtf6vn5d037e889q6vpcjwx, Database: , SessionId: ydb://session/3?node_id=3&id=YzJmMThmZTYtZDA2YTYxMDItNWM0ZDM5LWZlZGU2ODAz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed |89.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous |89.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |89.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed |89.8%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TBsDbStat::ChaoticParallelWrite_DbStat [GOOD] >> TBsHuge::Simple >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> TBsHuge::Simple [GOOD] >> TBsHuge::SimpleErasureNone >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> TBsHuge::SimpleErasureNone [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { 
PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-02-19T08:09:03.297774Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2026-02-19T08:09:03.297868Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:3:2050] Successful handshake: owner# 800, generation# 1 2026-02-19T08:09:03.298070Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2026-02-19T08:09:03.298113Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:3:2050] Commit generation: owner# 800, generation# 1 2026-02-19T08:09:03.298193Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2026-02-19T08:09:03.298226Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:6:2053] Successful handshake: owner# 800, generation# 1 2026-02-19T08:09:03.298448Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2026-02-19T08:09:03.298481Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:6:2053] Commit generation: owner# 800, generation# 1 2026-02-19T08:09:03.298620Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:03.299054Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2069] 2026-02-19T08:09:03.299093Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:3:2050] Upsert description: path# /root/tenant 2026-02-19T08:09:03.299195Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:3:2050] Subscribe: subscriber# [1:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:03.299437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:44:2069] 2026-02-19T08:09:03.299468Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# /root/tenant 2026-02-19T08:09:03.299508Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:03.299646Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:45:2069] 2026-02-19T08:09:03.299671Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:9:2056] Upsert description: path# /root/tenant 2026-02-19T08:09:03.299703Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:9:2056] Subscribe: subscriber# [1:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:03.299785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2026-02-19T08:09:03.299839Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2069] 2026-02-19T08:09:03.299885Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2026-02-19T08:09:03.299917Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:44:2069] 2026-02-19T08:09:03.299978Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:45:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2026-02-19T08:09:03.300018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:45:2069] 2026-02-19T08:09:03.300100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2069] 2026-02-19T08:09:03.300160Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:41:2069] 2026-02-19T08:09:03.300206Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:39:2069][/root/tenant] Set up state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:09:03.300262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:42:2069] 2026-02-19T08:09:03.300300Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:39:2069][/root/tenant] Ignore empty state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2026-02-19T08:09:03.300483Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:36:2066], cookie# 0, event size# 103 2026-02-19T08:09:03.300522Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-02-19T08:09:03.300586Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-02-19T08:09:03.300748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2026-02-19T08:09:03.300816Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:43:2069] 2026-02-19T08:09:03.300866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: 
sender# [1:40:2069] 2026-02-19T08:09:03.300936Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:39:2069][/root/tenant] Update to strong state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2026-02-19T08:09:03.817351Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2026-02-19T08:09:03.817427Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:3:2050] Successful handshake: owner# 800, generation# 1 2026-02-19T08:09:03.817544Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2026-02-19T08:09:03.817583Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:3:2050] Commit generation: owner# 800, generation# 1 2026-02-19T08:09:03.817629Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2026-02-19T08:09:03.817652Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:6:2053] Successful handshake: owner# 900, generation# 1 2026-02-19T08:09:03.817815Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2026-02-19T08:09:03.817849Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:6:2053] Commit generation: owner# 900, generation# 1 2026-02-19T08:09:03.818043Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:09:03.818412Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2069] 2026-02-19T08:09:03.818446Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:3:2050] Upsert description: path# /root/tenant 2026-02-19T08:09:03.818525Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:3:2050] Subscribe: subscriber# [3:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:03.818662Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:44:2069] 2026-02-19T08:09:03.818682Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:6:2053] Upsert description: path# /root/tenant 2026-02-19T08:09:03.818713Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:6:2053] Subscribe: subscriber# [3:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:03.818824Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: 
[3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:45:2069] 2026-02-19T08:09:03.818864Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:9:2056] Upsert description: path# /root/tenant 2026-02-19T08:09:03.818907Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:9:2056] Subscribe: subscriber# [3:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:09:03.819010Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2026-02-19T08:09:03.819061Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2069] 2026-02-19T08:09:03.819112Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2026-02-19 ... 94: [399:3:2050] Commit generation: owner# 910, generation# 1 2026-02-19T08:10:44.734310Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2026-02-19T08:10:44.734341Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [399:6:2053] Successful handshake: owner# 910, generation# 1 2026-02-19T08:10:44.734461Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2026-02-19T08:10:44.734481Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [399:6:2053] Commit generation: owner# 910, generation# 1 2026-02-19T08:10:44.734637Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][399:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[399:24339059:0], [399:1099535966835:0], [399:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:10:44.734998Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2069] 2026-02-19T08:10:44.735035Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2026-02-19T08:10:44.735107Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:3:2050] Subscribe: subscriber# [399:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:10:44.735249Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:44:2069] 2026-02-19T08:10:44.735272Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2026-02-19T08:10:44.735299Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:6:2053] Subscribe: subscriber# [399:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:10:44.735391Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:45:2069] 2026-02-19T08:10:44.735408Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:9:2056] Upsert description: 
path# /Root/Tenant/table_inside 2026-02-19T08:10:44.735431Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:9:2056] Subscribe: subscriber# [399:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:10:44.735468Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2026-02-19T08:10:44.735499Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2069] 2026-02-19T08:10:44.735547Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2026-02-19T08:10:44.735586Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:44:2069] 2026-02-19T08:10:44.735621Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2026-02-19T08:10:44.735656Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:45:2069] 2026-02-19T08:10:44.735727Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2069] 2026-02-19T08:10:44.735770Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:41:2069] 2026-02-19T08:10:44.735800Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][399:39:2069][/Root/Tenant/table_inside] Set up state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:44.735835Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:42:2069] 2026-02-19T08:10:44.735862Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][399:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2026-02-19T08:10:45.199212Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2026-02-19T08:10:45.199259Z 
node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:3:2050] Successful handshake: owner# 910, generation# 1 2026-02-19T08:10:45.199343Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2026-02-19T08:10:45.199370Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [401:3:2050] Commit generation: owner# 910, generation# 1 2026-02-19T08:10:45.199412Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2026-02-19T08:10:45.199440Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:6:2053] Successful handshake: owner# 910, generation# 1 2026-02-19T08:10:45.199584Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2026-02-19T08:10:45.199611Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [401:6:2053] Commit generation: owner# 910, generation# 1 2026-02-19T08:10:45.199688Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][401:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[401:24339059:0], [401:1099535966835:0], [401:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:10:45.199932Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:43:2069] 2026-02-19T08:10:45.199951Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:3:2050] Upsert description: path# /Root/Tenant/table_inside 2026-02-19T08:10:45.200015Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:3:2050] Subscribe: subscriber# [401:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:10:45.200163Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:44:2069] 2026-02-19T08:10:45.200185Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:6:2053] Upsert description: path# /Root/Tenant/table_inside 2026-02-19T08:10:45.200226Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:6:2053] Subscribe: subscriber# [401:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:10:45.200320Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:45:2069] 2026-02-19T08:10:45.200346Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:9:2056] Upsert description: path# /Root/Tenant/table_inside 2026-02-19T08:10:45.200382Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:9:2056] Subscribe: subscriber# [401:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-02-19T08:10:45.200436Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:3:2050] 2026-02-19T08:10:45.200480Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# 
[401:43:2069] 2026-02-19T08:10:45.200527Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:6:2053] 2026-02-19T08:10:45.200566Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:44:2069] 2026-02-19T08:10:45.200634Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:9:2056] 2026-02-19T08:10:45.200671Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:45:2069] 2026-02-19T08:10:45.200746Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:40:2069] 2026-02-19T08:10:45.200806Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:41:2069] 2026-02-19T08:10:45.200837Z node 401 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][401:39:2069][/Root/Tenant/table_inside] Set up state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:10:45.200878Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:42:2069] 2026-02-19T08:10:45.200921Z node 401 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][401:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> TExportToS3Tests::ShouldRestartOnScanErrors |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin >> 
TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] |89.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardServerLess::StorageBilling [GOOD] >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CheckItemProgress ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:09:15.462354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:09:15.462441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:15.462512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:09:15.462562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:09:15.462597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:09:15.462635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:09:15.462661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:09:15.462691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:09:15.462768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:09:15.462860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:09:15.463722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:09:15.464056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:09:15.554321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:09:15.554380Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:09:15.590515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:09:15.591237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:09:15.591466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:09:15.603594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:09:15.604329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:09:15.605037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:09:15.605371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:09:15.608405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:15.608562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:09:15.609697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-02-19T08:09:15.609794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:09:15.609911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:09:15.609952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:09:15.610040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:09:15.610211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:09:15.762017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:09:15.763981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:09:15.764056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... populator: [1:1100:3013], at schemeshard: 72075186233409549, txId: 107, path id: 32 2026-02-19T08:10:27.201886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2026-02-19T08:10:27.201944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2026-02-19T08:10:27.202049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2026-02-19T08:10:27.202096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2026-02-19T08:10:27.202160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 107:0 ProposedWaitParts -> Done 2026-02-19T08:10:27.203136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2026-02-19T08:10:27.203254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2026-02-19T08:10:27.203317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2026-02-19T08:10:27.203368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 12 2026-02-19T08:10:27.203413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 6 2026-02-19T08:10:27.207281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2026-02-19T08:10:27.207390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2026-02-19T08:10:27.207437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2026-02-19T08:10:27.207471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: 
[OwnerId: 72075186233409549, LocalPathId: 32], version: 18446744073709551615 2026-02-19T08:10:27.207504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2026-02-19T08:10:27.207602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2026-02-19T08:10:27.211123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2026-02-19T08:10:27.211200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2026-02-19T08:10:27.211581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-02-19T08:10:27.211784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-02-19T08:10:27.211838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:10:27.211889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-02-19T08:10:27.211928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:10:27.211971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2026-02-19T08:10:27.212069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1293:3169] message: TxId: 107 2026-02-19T08:10:27.212131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:10:27.212169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-02-19T08:10:27.212201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:0 2026-02-19T08:10:27.212301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 2 2026-02-19T08:10:27.213918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-02-19T08:10:27.215751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-02-19T08:10:27.217879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:10:27.217954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:3488:5316] TestWaitNotification: OK eventTxId 107 2026-02-19T08:10:27.237665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { 
RawX1: 1265 RawX2: 4294970445 } TabletId: 72075186233409552 State: 4 2026-02-19T08:10:27.237800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2026-02-19T08:10:27.240482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72075186233409549 2026-02-19T08:10:27.240625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2026-02-19T08:10:27.241170Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 2026-02-19T08:10:27.244459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2026-02-19T08:10:27.244741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 1 2026-02-19T08:10:27.245375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2026-02-19T08:10:27.245428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 32], at schemeshard: 72075186233409549 2026-02-19T08:10:27.245507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2026-02-19T08:10:27.248370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72075186233409549:4 2026-02-19T08:10:27.248437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2026-02-19T08:10:27.248789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2026-02-19T08:10:27.378745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2026-02-19T08:10:27.378871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2026-02-19T08:10:27.378964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-02-19T08:10:27.379033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:10:27.379068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:10:27.379094Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:10:27.379126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-02-19T08:10:27.379151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-02-19T08:10:27.379176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-02-19T08:10:27.445951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:27.446098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:06:00.000000Z 2026-02-19T08:10:27.446190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:49.340702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:49.340828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:07:00.000000Z 2026-02-19T08:10:49.340894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser |89.8%| [TA] {RESULT} 
$(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [LD] {RESULT} $(B)/ydb/core/mon/ut/ydb-core-mon-ut >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TExportToS3Tests::CompletedExportEndTime |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin >> TExportToS3Tests::ExportStartTime >> TExportToS3Tests::RebootDuringCompletion >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath [GOOD] >> TSchemeShardExportToFsTests::ShouldExportDataAndSchemaToFs >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser >> TExportToS3Tests::ShouldRetryAtFinalStage >> TExportToS3Tests::DropCopiesBeforeTransferring1 
|89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous >> TExportToS3Tests::UidAsIdempotencyKey >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> TExportToS3Tests::IndexMaterializationDisabled >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> TExportToS3Tests::RebootDuringCompletion [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] >> TExportToS3Tests::RebootDuringAbortion >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin >> TSchemeShardExportToFsTests::ShouldExportDataAndSchemaToFs [GOOD] >> TSchemeShardExportToFsTests::ShouldExportMultipleTablesWithData >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser >> TBsLocalRecovery::ChaoticWriteRestart [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHuge [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:10:49.250268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:49.358581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:10:49.376770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:10:49.376842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:10:49.377347Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:10:49.637159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:49.637318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:49.712260Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488646696804 != 1771488646696808 2026-02-19T08:10:49.721888Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:49.767114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:49.851880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:10:50.156766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:10:50.157226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:10:50.159214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:50.160180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:10:50.162147Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-02-19T08:10:50.162212Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:404:2403] Proxy marker# C1 2026-02-19T08:10:50.173568Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:50.198082Z node 1 :HIVE DEBUG: hive_impl.cpp:2523: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2026-02-19T08:10:50.198313Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2026-02-19T08:10:50.271230Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-02-19T08:10:50.271420Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-02-19T08:10:50.271740Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-02-19T08:10:50.272231Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-02-19T08:10:50.272307Z 
node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:404:2403] Proxy 2026-02-19T08:10:50.272898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:10:50.274525Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-02-19T08:10:50.274646Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-02-19T08:10:50.274719Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-02-19T08:10:50.274825Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-02-19T08:10:50.275689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:10:50.275800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-02-19T08:10:50.276739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2026-02-19T08:10:50.280487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:10:50.282560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:10:50.282688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:10:50.283856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2026-02-19T08:10:50.288416Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2026-02-19T08:10:50.300440Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:10:50.300577Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-02-19T08:10:50.300858Z node 1 :HIVE 
DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2026-02-19T08:10:50.300939Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2026-02-19T08:10:50.301000Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-02-19T08:10:50.301156Z node 1 :HIVE DEBUG: hive_impl.cpp:2908: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2026-02-19T08:10:50.301667Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-02-19T08:10:50.301831Z node 1 :HIVE DEBUG: hive_impl.cpp:1112: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-02-19T08:10:50.302608Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2026-02-19T08:10:50.302809Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2026-02-19T08:10:50.302909Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137148560682592}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2026-02-19T08:10:50.302984Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137148560682592}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2026-02-19T08:10:50.303154Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137148560682592}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2026-02-19T08:10:50.303228Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2026-02-19T08:10:50.303290Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2026-02-19T08:10:50.303347Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2026-02-19T08:10:50.303524Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2026-02-19T08:10:50.303558Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 2026-02-19T08:10:50.303607Z node 1 :HIVE DEBUG: hive_impl.cpp:1258: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2026-02-19T08:10:50.303711Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2026-02-19T08:10:50.303823Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 
2026-02-19T08:10:50.303946Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2026-02-19T08:10:50.304195Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#7205759403796889 ... reason: , at schemeshard: 72057594046644480 2026-02-19T08:10:57.230447Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/table-2 2026-02-19T08:10:57.233839Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:10:57.234000Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2026-02-19T08:10:57.234087Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2026-02-19T08:10:57.235525Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:10:57.235727Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:10:57.247021Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [2:338:2376] NKikimrLocal.TEvStopTablet TabletId: 72075186224037888 FollowerId: 0 Generation: 1,0x10040206 [2:405:2404] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1 Actions: NKikimr::TTabletReqBlockBlobStorage} 2026-02-19T08:10:57.248030Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:10:57.249151Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:10:57.249340Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:10:57.252446Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2026-02-19T08:10:57.252546Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2026-02-19T08:10:57.253020Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:10:57.253109Z node 2 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2026-02-19T08:10:57.253160Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-02-19T08:10:57.253254Z node 2 :HIVE DEBUG: hive_impl.cpp:929: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2026-02-19T08:10:57.253661Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2026-02-19T08:10:57.265093Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:10:57.266781Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2026-02-19T08:10:57.266856Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715665 step# 4500 Status# 16 SEND to# [2:405:2404] Proxy marker# C1 2026-02-19T08:10:57.277763Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2026-02-19T08:10:57.354511Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715665 has been planned 2026-02-19T08:10:57.354621Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2026-02-19T08:10:57.354668Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2026-02-19T08:10:57.354953Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 5000 in 0.500000s at 4.950000s 2026-02-19T08:10:57.355401Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 4500, txid# 281474976715665 marker# C2 2026-02-19T08:10:57.355499Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715665 stepId# 4500 Status# 17 SEND EvProposeTransactionStatus to# [2:405:2404] Proxy 2026-02-19T08:10:57.356290Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715665 at step 4500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 4500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-02-19T08:10:57.356355Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:10:57.356549Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 4500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:10:57.357108Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:10:57.357166Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:10:57.357222Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [4500:281474976715665] in PlanQueue unit at 72075186224037889 2026-02-19T08:10:57.357447Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 4500:281474976715665 keys extracted: 0 2026-02-19T08:10:57.357616Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:10:57.357893Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:10:57.357970Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2026-02-19T08:10:57.358386Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 
72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:10:57.360054Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 4500} 2026-02-19T08:10:57.360120Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:10:57.360455Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:10:57.360519Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [4500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:10:57.360570Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2026-02-19T08:10:57.360690Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:10:57.360851Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-02-19T08:10:57.360940Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-02-19T08:10:57.360992Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2026-02-19T08:10:57.361020Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2026-02-19T08:10:57.361059Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 acknowledged 2026-02-19T08:10:57.361923Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2026-02-19T08:10:57.365236Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2026-02-19T08:10:57.365329Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-02-19T08:10:57.366085Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2026-02-19T08:10:57.366216Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 1 2026-02-19T08:10:57.366820Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2026-02-19T08:10:57.367548Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:10:57.383423Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 
in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:10:57.383742Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-02-19T08:10:57.385786Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:10:57.386870Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-02-19T08:10:57.387364Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2026-02-19T08:10:57.387430Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2026-02-19T08:10:57.387553Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2026-02-19T08:10:57.387704Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2026-02-19T08:10:57.387849Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin >> TExportToS3Tests::IndexMaterializationDisabled [GOOD] >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TExportToS3Tests::UserSID >> TExportToS3Tests::IndexMaterialization >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHuge [GOOD] Test command err: 2026-02-19T08:10:45.926772Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:3106} PDiskId# 1 ownerId# 6 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 6 ownerRound# 101 lsn# 12 PDiskId# 1 >> TExportToS3Tests::CorruptedDyNumber >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TopicsWithPermissionsExport >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin >> TSchemeShardExportToFsTests::ShouldExportMultipleTablesWithData [GOOD] >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] >> TExportToS3Tests::IndexMaterialization [GOOD] >> TExportToS3Tests::EncryptedExport >> TExportToS3Tests::IndexMaterializationGlobal |89.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin |89.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |89.8%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser >> LdapAuthProviderTests::CanRefreshGroupsInfo >> TExportToS3Tests::EnableChecksumsPersistance >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix [GOOD] >> TExportToS3Tests::TablePermissions >> TExportToS3Tests::TransferExportNoConnString |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> TExportToS3Tests::IndexMaterializationTwoTables >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs [GOOD] >> 
LdapAuthProviderTests::CanFetchGroupsWithDisabledRequestToAD >> TExportToS3Tests::IndexMaterializationGlobal [GOOD] >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeLdaps >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin >> TExportToS3Tests::EncryptedExport [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalAsync >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TExportToS3Tests::ExportTableWithUniqueIndex >> LdapAuthProviderTests::CanRefreshGroupsInfoWithError >> TExportToS3Tests::DisableAutoDropping >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous >> TExportToS3Tests::TablePermissions [GOOD] >> TExportToS3Tests::TransferExportNoConnString [GOOD] >> ActorHandler::HttpOk >> test_analyze.py::test_basic [GOOD] >> TExportToS3Tests::TopicExport >> Other::TraceNoValidGroupForbidden >> TExportToS3Tests::TransferExportWithConnString >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:50.555459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:50.555566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:50.555633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:50.555678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:50.555713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:50.555753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:50.555797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:50.555830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:50.555894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:50.555978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:50.556915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:50.557229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:50.648930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:50.648998Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:50.661743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:50.662380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:50.662593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:50.674440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:50.675111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:50.675913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:50.676201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:50.679637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:50.679844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:50.681119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:50.681200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:50.681322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:50.681373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:50.681419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:50.681670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:50.837636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.838806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.838931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:50.839824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
26-02-19T08:11:02.698087Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-02-19T08:11:02.698117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:02.698196Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-02-19T08:11:02.699357Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2026-02-19T08:11:02.699473Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:11:02.699722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2026-02-19T08:11:02.699762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-02-19T08:11:02.699801Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2026-02-19T08:11:02.700260Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:02.700367Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 30064773232 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:11:02.700421Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2026-02-19T08:11:02.700583Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:181: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2026-02-19T08:11:02.700663Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-02-19T08:11:02.700713Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-02-19T08:11:02.700768Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-02-19T08:11:02.700985Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-02-19T08:11:02.701051Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:02.701144Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:11:02.701195Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2026-02-19T08:11:02.701251Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-02-19T08:11:02.701296Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-02-19T08:11:02.701342Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710761:0 2026-02-19T08:11:02.701420Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:02.701473Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-02-19T08:11:02.701538Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:11:02.701587Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:11:02.707033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:02.707123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:02.710452Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:02.710503Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:02.710651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:11:02.710770Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:02.710807Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-02-19T08:11:02.710849Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 
39 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-02-19T08:11:02.711629Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:02.711721Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:02.711764Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-02-19T08:11:02.711828Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:11:02.711888Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:11:02.712296Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:02.712375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:02.712397Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-02-19T08:11:02.712422Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:11:02.712446Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:11:02.712508Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-02-19T08:11:02.712565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-02-19T08:11:02.712874Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:11:02.712905Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:11:02.712957Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:02.719016Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:02.727810Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:02.727993Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-02-19T08:11:02.728064Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710761 2026-02-19T08:11:02.728769Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:11:02.730611Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-02-19T08:11:02.730683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [7:928:2885] TestWaitNotification: OK eventTxId 112 >> Other::TraceInvalidTokenForbidden >> TExportToS3Tests::IndexMaterializationTwoTables [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin >> TExportToS3Tests::ReplicationExportWithOAuthCredentials >> TExportToS3Tests::IndexMaterializationGlobalAsync [GOOD] >> TExportToS3Tests::TopicExport [GOOD] >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] >> TExportToS3Tests::DisableAutoDropping [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> TExportToS3Tests::TopicWithPermissionsExport >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TExportToS3Tests::ExternalDataSourceAuthBasic >> TExportToS3Tests::TransferExportWithConnString [GOOD] >> TExportToS3Tests::DecimalOutOfRange >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> TExportToS3Tests::AuditCompletedExport |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TExportToS3Tests::ReplicationExportWithOAuthCredentials [GOOD] >> LdapAuthProviderTests::CanFetchGroupsWithDisabledRequestToAD [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser >> LdapAuthProviderTests::CanFetchGroupsWithDontExistGroupAttribute >> TExportToS3Tests::TransferExportWithConsumer >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin >> TExportToS3Tests::TopicWithPermissionsExport [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TExportToS3Tests::ExternalDataSourceAuthBasic [GOOD] >> Other::TraceHttpOk >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeLdaps [GOOD] >> TExportToS3Tests::ReplicationExportMultipleItems >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree [GOOD] >> TExportToS3Tests::TransferExportWithConsumer [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeDisableNestedGroups >> TExportToS3Tests::ExternalDataSourceAuthAWS >> TExportToS3Tests::TopicsExport |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TExportToS3Tests::ExternalDataSourceAuthNone >> TSchemeShardExportToFsTests::FsExportWithMultipleTables >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin >> TExportToS3Tests::ReplicationExportMultipleItems [GOOD] >> TExportToS3Tests::ExternalDataSourceAuthAWS [GOOD] >> TExportToS3Tests::ReplicationExportGlobalConsistency >> TExportToS3Tests::DecimalOutOfRange [GOOD] >> TExportToS3Tests::ExternalDataSourceAuthMdbBasic >> TExportToS3Tests::TopicsExport [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TExportToS3Tests::DisableIcb >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TExportToS3Tests::ExternalDataSourceAuthNone [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system [GOOD] >> TExportToS3Tests::SystemViewWithPermissionsExport >> TExportToS3Tests::ReplicationExportGlobalConsistency [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TExportToS3Tests::ExternalDataSourceAuthServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:10:52.049410Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:10:52.153411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:10:52.171807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:10:52.171886Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:10:52.172406Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:10:52.464697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:52.464835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:52.533529Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488649157392 != 1771488649157396 2026-02-19T08:10:52.542331Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:52.588137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:52.675177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:10:52.967989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:10:52.968392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:10:52.970268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:52.971060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:10:52.972696Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-02-19T08:10:52.972751Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:404:2403] Proxy marker# C1 2026-02-19T08:10:52.983991Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:53.008110Z node 1 :HIVE DEBUG: hive_impl.cpp:2523: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2026-02-19T08:10:53.008244Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2026-02-19T08:10:53.080475Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-02-19T08:10:53.080608Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-02-19T08:10:53.080870Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-02-19T08:10:53.081358Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-02-19T08:10:53.081430Z 
node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:404:2403] Proxy 2026-02-19T08:10:53.081954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:10:53.082973Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-02-19T08:10:53.083059Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-02-19T08:10:53.083122Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-02-19T08:10:53.083162Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-02-19T08:10:53.083874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:10:53.083962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-02-19T08:10:53.084786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2026-02-19T08:10:53.088972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:10:53.090336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:10:53.090426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:10:53.091306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2026-02-19T08:10:53.094576Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2026-02-19T08:10:53.109512Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:10:53.109626Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-02-19T08:10:53.109894Z node 1 :HIVE 
DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2026-02-19T08:10:53.109967Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2026-02-19T08:10:53.110034Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-02-19T08:10:53.110226Z node 1 :HIVE DEBUG: hive_impl.cpp:2908: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2026-02-19T08:10:53.110856Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-02-19T08:10:53.111028Z node 1 :HIVE DEBUG: hive_impl.cpp:1112: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-02-19T08:10:53.111838Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2026-02-19T08:10:53.112051Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2026-02-19T08:10:53.112174Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137098570870368}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2026-02-19T08:10:53.112255Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137098570870368}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2026-02-19T08:10:53.112439Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137098570870368}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2026-02-19T08:10:53.112537Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2026-02-19T08:10:53.112594Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2026-02-19T08:10:53.112645Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2026-02-19T08:10:53.112818Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2026-02-19T08:10:53.112849Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 2026-02-19T08:10:53.112886Z node 1 :HIVE DEBUG: hive_impl.cpp:1258: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2026-02-19T08:10:53.112973Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2026-02-19T08:10:53.113087Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 
2026-02-19T08:10:53.113172Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2026-02-19T08:10:53.113355Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#7205759403796889 ... :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:11:05.097934Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:11:05.098088Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:11:05.100575Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2026-02-19T08:11:05.100667Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2026-02-19T08:11:05.101184Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:11:05.101266Z node 2 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2026-02-19T08:11:05.101315Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-02-19T08:11:05.101409Z node 2 :HIVE DEBUG: hive_impl.cpp:929: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2026-02-19T08:11:05.101935Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2026-02-19T08:11:05.113052Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:11:05.123824Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2026-02-19T08:11:05.147475Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=DataShard 72075186224037889 is blocked by a schema operation;tx_id=281474976715662; 2026-02-19T08:11:05.147647Z node 2 :TX_DATASHARD INFO: datashard_pipeline.cpp:1318: Outdated Tx 281474976715662 is cleaned at tablet 72075186224037889 and outdatedStep# 34500 2026-02-19T08:11:05.147750Z node 2 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:38: Cleaned up old txs at 72075186224037889 TxInFly 0 2026-02-19T08:11:05.147998Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:11:05.148077Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037889 txId 281474976715666 ssId 72057594046644480 seqNo 2:4 2026-02-19T08:11:05.148134Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715666 at tablet 72075186224037889 2026-02-19T08:11:05.148337Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:05.148515Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:05.148585Z node 2 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:11:05.148620Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:05.148661Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2026-02-19T08:11:05.159451Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:11:05.159581Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:11:05.161119Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2026-02-19T08:11:05.161201Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715666 step# 35000 Status# 16 SEND to# [2:405:2404] Proxy marker# C1 2026-02-19T08:11:05.225272Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715666 has been planned 2026-02-19T08:11:05.225385Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2026-02-19T08:11:05.225429Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2026-02-19T08:11:05.225800Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 35500 in 0.500000s at 35.450000s 2026-02-19T08:11:05.226293Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 35000, txid# 281474976715666 marker# C2 2026-02-19T08:11:05.226386Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715666 stepId# 35000 Status# 17 SEND EvProposeTransactionStatus to# [2:405:2404] Proxy 2026-02-19T08:11:05.227087Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715666 at step 35000 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 0 RawX2: 0 } } Step: 35000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-02-19T08:11:05.227152Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:11:05.227374Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 35000, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:11:05.227908Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:11:05.227976Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:05.228032Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [35000:281474976715666] in PlanQueue unit at 72075186224037889 2026-02-19T08:11:05.228240Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 35000:281474976715666 keys extracted: 0 2026-02-19T08:11:05.228388Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) 
active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:05.228695Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:11:05.228784Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2026-02-19T08:11:05.229297Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:05.231820Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 35000} 2026-02-19T08:11:05.231939Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:11:05.232244Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-02-19T08:11:05.232365Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-02-19T08:11:05.232420Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2026-02-19T08:11:05.232453Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2026-02-19T08:11:05.232509Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 acknowledged 2026-02-19T08:11:05.232956Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:11:05.233046Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [35000 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:05.233116Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2026-02-19T08:11:05.233211Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:11:05.234259Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2026-02-19T08:11:05.239230Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715666:0 2026-02-19T08:11:05.239749Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2026-02-19T08:11:05.239842Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-02-19T08:11:05.240508Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:11:05.257941Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in 
PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:11:05.258216Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-02-19T08:11:05.260195Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:11:05.261178Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-02-19T08:11:05.261727Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2026-02-19T08:11:05.261799Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2026-02-19T08:11:05.261914Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2026-02-19T08:11:05.262058Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2026-02-19T08:11:05.262192Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system >> LdapAuthProviderTests::CanFetchGroupsWithDontExistGroupAttribute [GOOD] >> TExportToS3Tests::ExternalDataSourceAuthMdbBasic [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] >> TExportToS3Tests::SystemViewWithPermissionsExport [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> ActorHandler::HttpOk [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TExportToS3Tests::ReplicatedTableExport >> TExportToS3Tests::DisableIcb [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser >> LdapAuthProviderTests::CanGetErrorIfAppropriateLoginProviderIsAbsent >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system >> ActorHandler::InvalidTokenForbidden >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeDisableNestedGroups [GOOD] >> LdapAuthProviderTests::CanFetchGroupsFromAdServer >> Other::TraceNoValidGroupForbidden [GOOD] |89.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |89.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |89.8%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |89.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> TExportToS3Tests::ExternalDataSourceAuthServiceAccount [GOOD] >> TExportToS3Tests::TopicExportWithAllFields >> ActorPage::NoValidGroupForbidden |89.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> ActorPage::InvalidTokenForbidden >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin >> TExportToS3Tests::ExternalTable >> Other::TraceInvalidTokenForbidden [GOOD] |89.9%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:57.573698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:57.573841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:57.573920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:57.573967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:57.574005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:57.574042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:57.574072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:57.574102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:57.574189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-02-19T08:10:57.574280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:57.575174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:57.575472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:57.665060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:57.665122Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:57.677302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:57.679237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:57.679455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:57.692615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:57.693364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:57.694160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:57.694445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:57.697658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:57.697868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:57.699017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:57.699076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:57.699184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:57.699233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:57.699278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:57.699466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:57.819196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.823965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.824050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.824125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.824197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:57.824268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
pathId: [OwnerId: 72057594046678944, LocalPathId: 42], version: 1 2026-02-19T08:11:09.235049Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 4 2026-02-19T08:11:09.235116Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/2, is published: true 2026-02-19T08:11:09.235895Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697601 2026-02-19T08:11:09.236027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72057594037968897 2026-02-19T08:11:09.236069Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2026-02-19T08:11:09.236647Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-02-19T08:11:09.236888Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type DataShard, boot OK, tablet id 72075186233409548 2026-02-19T08:11:09.237191Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-02-19T08:11:09.237247Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2026-02-19T08:11:09.237387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-02-19T08:11:09.237456Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-02-19T08:11:09.237574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:182: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-02-19T08:11:09.237692Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710758:1 CreateParts -> 
ConfigureParts 2026-02-19T08:11:09.238793Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697601 2026-02-19T08:11:09.239072Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1819: TOperation RegisterRelationByTabletId collision in routes has found, TxId# 281474976710758, partId# 0, prevPartId# 1, tablet# 72057594037968897, guessDefaultRootHive# yes, prevTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "1" CopyFromTable: "/MyRoot/Table2" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true, newTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "0" CopyFromTable: "/MyRoot/Table1" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true 2026-02-19T08:11:09.239129Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2026-02-19T08:11:09.239560Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-02-19T08:11:09.239733Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 3, type DataShard, boot OK, tablet id 72075186233409549 2026-02-19T08:11:09.239890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-02-19T08:11:09.239924Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2026-02-19T08:11:09.240007Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-02-19T08:11:09.240043Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-02-19T08:11:09.240099Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:182: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-02-19T08:11:09.240176Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710758:0 CreateParts -> ConfigureParts 2026-02-19T08:11:09.241281Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.241407Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.243747Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.243999Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.246174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2026-02-19T08:11:09.246605Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2026-02-19T08:11:09.246667Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:1 ProgressState at tablet# 72057594046678944 2026-02-19T08:11:09.246749Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:1 Propose modify scheme on dstDatashard# 72075186233409548 idx# 72057594046678944:4 srcDatashard# 72075186233409547 idx# 72057594046678944:2 operationId# 281474976710758:1 seqNo# 2:3 at tablet# 72057594046678944 2026-02-19T08:11:09.247495Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2026-02-19T08:11:09.247861Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2026-02-19T08:11:09.247903Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2026-02-19T08:11:09.247950Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409549 idx# 72057594046678944:3 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:4 at tablet# 72057594046678944 2026-02-19T08:11:09.254034Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:4 msg type: 269549568 2026-02-19T08:11:09.254184Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-02-19T08:11:09.254272Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 
281474976710758, partId: 1, tablet: 72075186233409548 2026-02-19T08:11:09.254304Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72075186233409547 2026-02-19T08:11:09.256959Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:3 msg type: 269549568 2026-02-19T08:11:09.257076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-02-19T08:11:09.257152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409549 2026-02-19T08:11:09.257185Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 >> TExportToS3Tests::ReplicatedTableExport [GOOD] |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut/ydb-core-base-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut/ydb-core-base-ut |89.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [LD] {RESULT} $(B)/ydb/core/base/ut/ydb-core-base-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExternalDataSourceAuthMdbBasic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:56.106617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:56.106719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:56.106798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:56.106845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:56.106879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:56.106914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:56.106943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:56.106980Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:56.107067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:56.107170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:56.107994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:56.108263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:56.182206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:56.182265Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:56.192587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:56.193243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:56.193479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:56.204753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:56.205387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:56.206087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:56.206336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:56.209287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:56.209468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:56.210592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:56.210649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:56.210749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:56.210791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:56.210834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:56.210999Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:56.358168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.359986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
26-02-19T08:11:09.549210Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-02-19T08:11:09.549241Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:09.549345Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-02-19T08:11:09.551328Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2026-02-19T08:11:09.551393Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-02-19T08:11:09.551445Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-02-19T08:11:09.552422Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-02-19T08:11:09.552582Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:11:09.552978Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000040 2026-02-19T08:11:09.553868Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:09.553982Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740528 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:11:09.554080Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000040, at schemeshard: 72057594046678944 2026-02-19T08:11:09.554217Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:181: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-02-19T08:11:09.554305Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-02-19T08:11:09.554357Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:09.554412Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 
2026-02-19T08:11:09.554471Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:09.554551Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:09.554626Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:11:09.554664Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-02-19T08:11:09.554715Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:09.554760Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-02-19T08:11:09.554802Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710758:0 2026-02-19T08:11:09.554876Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:09.554934Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-02-19T08:11:09.554971Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-02-19T08:11:09.555034Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:11:09.555776Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.557273Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:09.557306Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:09.557406Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:11:09.557563Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:09.557604Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-02-19T08:11:09.557636Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 
281474976710758, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-02-19T08:11:09.558165Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.558241Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.558298Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-02-19T08:11:09.558343Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:11:09.558397Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:11:09.558679Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.558728Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.558748Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-02-19T08:11:09.558780Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:11:09.558809Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:11:09.558874Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-02-19T08:11:09.558918Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [8:128:2153] 2026-02-19T08:11:09.559177Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:11:09.559221Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:11:09.559297Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:09.560836Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.562022Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:09.562129Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-02-19T08:11:09.562205Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710758 2026-02-19T08:11:09.562775Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:11:09.564121Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:09.564193Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:747:2735] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::TopicExportWithAllFields [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceNoValidGroupForbidden [GOOD] Test command err: 2026-02-19T08:11:04.402869Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485877540670556:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:04.402944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:04.701442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:04.709915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:04.710086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:04.764907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:04.767063Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:04.770878Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485877540670528:2081] 1771488664401491 != 1771488664401494 2026-02-19T08:11:04.873667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:04.873704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:04.873734Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:04.873905Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:04.875434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:05.109507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:05.175282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:05.184204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) >> TExportToS3Tests::ExternalTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::DisableIcb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:54.881462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:54.881581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:54.881656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:54.881699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:54.881774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:54.881810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:54.881841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:10:54.881867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:54.881932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:54.882025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:54.882866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:54.883172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:54.968356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:54.968413Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:54.982251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:54.983321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:54.983524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:54.995160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:54.995879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:54.996537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:54.996797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:55.000338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:55.000538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:55.001842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:55.001908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:55.002030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:55.002083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:55.002130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:55.002303Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:55.156075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.157967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.158043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.158096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.158140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:55.158199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... esult] Discovery error: rid# 1, error# /MyRoot/Table1: CLIENT_UNAUTHENTICATED ({
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Couldn't get token for provided credentials from localhost:2135 with status TRANSPORT_UNAVAILABLE. Issue:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2135: Failed to connect to remote host: Connection refused Issue:
: Error: Grpc error response on endpoint localhost:2135 }) 2026-02-19T08:11:09.742517Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:11:09.743025Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:11:09.743100Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_replication.cpp:242: [72057594046678944] TCreateReplication TPropose opId# 101:0 ProgressState 2026-02-19T08:11:09.743174Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-02-19T08:11:09.743350Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:11:09.751077Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-02-19T08:11:09.751200Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-02-19T08:11:09.751479Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:09.751583Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740528 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:11:09.751658Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_replication.cpp:256: [72057594046678944] TCreateReplication TPropose opId# 101:0 HandleReply TEvOperationPlan: step# 5000038 2026-02-19T08:11:09.751863Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 Propose -> Done 2026-02-19T08:11:09.752044Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:11:09.752106Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:11:09.753628Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:09.753663Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:09.753864Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:11:09.754064Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:09.754109Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:11:09.754153Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:11:09.754219Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:11:09.754268Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:11:09.754514Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:11:09.754569Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:11:09.754631Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:11:09.754674Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:11:09.754730Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:11:09.754791Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:11:09.754844Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:11:09.754893Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:11:09.755048Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:11:09.755122Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:11:09.755176Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:11:09.755214Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-02-19T08:11:09.756609Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:11:09.756692Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:11:09.756733Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:11:09.756787Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:11:09.756842Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:09.762370Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:11:09.762467Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:11:09.762501Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:11:09.762534Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:11:09.762566Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:11:09.762651Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:11:09.765127Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:11:09.766292Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:11:09.766524Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:11:09.766576Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:11:09.766946Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:11:09.767059Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:11:09.767117Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [8:789:2760] TestWaitNotification: OK eventTxId 101 TestWaitNotification wait txId: 102 2026-02-19T08:11:09.769666Z node 8 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:11:09.769749Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:11:09.770167Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:11:09.770263Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:09.770308Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:793:2764] TestWaitNotification: OK eventTxId 102 >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceInvalidTokenForbidden [GOOD] Test command err: 2026-02-19T08:11:04.991959Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485881287626657:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:04.992017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:05.024254Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:05.289893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:05.290019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:05.296136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:05.330306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:05.465550Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:05.488523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:05.488560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:05.488572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:05.488778Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:05.542992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:05.733984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:05.739654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:05.784019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:05.788397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) >> ActorHandler::OptionsNoContent |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> Other::TraceHttpOk [GOOD] >> ActorHandler::NoValidGroupForbidden >> LdapAuthProviderTests::CanRefreshGroupsInfo [GOOD] >> LdapAuthProviderTests::CanRefreshGroupsInfoWithDisabledNestedGroups >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNamesCore::NameListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::PrefixListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::ExceptionsListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] >> LdapAuthProviderTests::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] >> LdapAuthProviderTests::CanFetchGroupsWithValidCredentialsUseExternalSaslAuth >> 
TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ReplicatedTableExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:56.107577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:56.107646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:56.107699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:56.107727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:56.107753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:56.107787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:56.107814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:56.107832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:56.107878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:56.107947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:56.108525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:56.108742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:56.189657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:56.189748Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:56.201836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:56.202454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:56.202657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:56.214568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:56.215314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:56.216005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:56.216299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:56.219716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:56.219907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:56.221063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:56.221126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:56.221236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:56.221298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:56.221363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:56.221570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:56.351220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.351934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:10:56.352448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:56.352630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
alPathId: 1], version: 8 2026-02-19T08:11:11.282144Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:11.283924Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:11:11.284023Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:11:11.284055Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:11:11.284089Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:11:11.284123Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:11:11.284209Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:11:11.285402Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1478 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:11:11.285447Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:11:11.285600Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1478 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:11:11.285789Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1478 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-02-19T08:11:11.291035Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 724 RawX2: 34359741077 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 
Generation: 2 2026-02-19T08:11:11.291107Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:11:11.291320Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 724 RawX2: 34359741077 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:11:11.291406Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:11:11.291547Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 724 RawX2: 34359741077 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:11:11.291648Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:11.291705Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:11:11.291755Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:11:11.291822Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:11:11.294746Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:11:11.295058Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:11:11.295230Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:11:11.302410Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:11:11.303150Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:11:11.303211Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:11:11.303759Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:11:11.303821Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:11:11.303883Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:11:11.303949Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:11:11.304005Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:11:11.304131Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:752:2729] message: TxId: 101 2026-02-19T08:11:11.304910Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:11:11.304992Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:11:11.305038Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:11:11.305252Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:11:11.311056Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:11:11.311136Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [8:753:2730] TestWaitNotification: OK eventTxId 101 2026-02-19T08:11:11.311797Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:11:11.312093Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 328us result status StatusSuccess 2026-02-19T08:11:11.312602Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 
WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> MonPage::HttpOk >> LdapAuthProviderTests::CanFetchGroupsFromAdServer [GOOD] >> LdapAuthProviderTests::CanFetchGroupsWithCustomGroupAttribute |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/reading/ydb-core-kqp-ut-olap-reading |89.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/reading/ydb-core-kqp-ut-olap-reading |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/reading/ydb-core-kqp-ut-olap-reading ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:58.048894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:58.048986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:58.049044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:58.049083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:58.049114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:58.049153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:58.049179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:10:58.049212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:58.049286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:58.049365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:58.050200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:58.050486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:58.135940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:58.135993Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:58.147434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:58.147994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:58.148192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:58.162998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:58.163696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:58.164409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:58.164655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:58.167862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:58.168065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:58.169544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:58.169603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:58.169702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:58.169769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:58.169809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:58.169965Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:58.315553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.316503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.316630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.316693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.316750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.316838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.316924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.316995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.317072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.317149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.317219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.317289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.317371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:58.317442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
26-02-19T08:11:12.000608Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-02-19T08:11:12.000649Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:11:12.000754Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-02-19T08:11:12.003282Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2026-02-19T08:11:12.003354Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-02-19T08:11:12.003420Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-02-19T08:11:12.005582Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-02-19T08:11:12.005802Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000041 2026-02-19T08:11:12.006962Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:12.007166Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740527 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:11:12.007241Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000041, at schemeshard: 72057594046678944 2026-02-19T08:11:12.007415Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:181: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-02-19T08:11:12.007514Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-02-19T08:11:12.007571Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:12.007633Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-02-19T08:11:12.007691Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:12.007778Z node 8 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:11:12.007875Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:11:12.007932Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-02-19T08:11:12.008009Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:12.008060Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-02-19T08:11:12.008111Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710758:0 2026-02-19T08:11:12.008218Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:11:12.008279Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-02-19T08:11:12.008345Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:11:12.008411Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2026-02-19T08:11:12.009070Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:12.010196Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:12.012505Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:12.012575Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:12.012767Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:11:12.013104Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:12.013146Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:210:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-02-19T08:11:12.013192Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:210:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 
40 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-02-19T08:11:12.014018Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:12.014152Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:12.014205Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-02-19T08:11:12.014273Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:11:12.014345Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-02-19T08:11:12.014838Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:12.014952Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:12.014988Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-02-19T08:11:12.015021Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-02-19T08:11:12.015054Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:11:12.015150Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-02-19T08:11:12.015213Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [8:129:2153] 2026-02-19T08:11:12.015415Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:11:12.015484Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:11:12.015584Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:11:12.019227Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:12.020139Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:12.020240Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-02-19T08:11:12.020336Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710758 2026-02-19T08:11:12.021105Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:11:12.022847Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:11:12.022911Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:779:2766]
TestWaitNotification: OK eventTxId 103
>> Other::UnknownPathNotFound
>> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous [GOOD]
>> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicExportWithAllFields [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:11:00.380176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:11:00.380252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:11:00.380307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:11:00.380343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:11:00.380381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:11:00.380413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:11:00.380445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:11:00.380469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:11:00.380532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:11:00.380599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:11:00.381484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:11:00.381836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:11:00.462483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:11:00.462531Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:00.473811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:11:00.474424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:11:00.474604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:11:00.486056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:11:00.486941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:11:00.487703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:00.487974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:11:00.494989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:00.495132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:11:00.496183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:00.496241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:00.496318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:11:00.496360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:00.496389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:11:00.496524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:11:00.629870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.630861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.630996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:00.631881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
26-02-19T08:11:11.907811Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-02-19T08:11:11.907845Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:11.907922Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-02-19T08:11:11.912157Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2026-02-19T08:11:11.912248Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-02-19T08:11:11.912310Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-02-19T08:11:11.914138Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-02-19T08:11:11.914352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:11:11.914816Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000040 2026-02-19T08:11:11.915222Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:11.915458Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 30064773232 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:11:11.915530Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000040, at schemeshard: 72057594046678944 2026-02-19T08:11:11.915713Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:181: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-02-19T08:11:11.915810Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-02-19T08:11:11.915865Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:11.915925Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 
2026-02-19T08:11:11.915968Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:11.916043Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:11.916127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:11:11.916172Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-02-19T08:11:11.916234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:11.916284Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-02-19T08:11:11.916329Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710758:0 2026-02-19T08:11:11.916419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:11.916466Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-02-19T08:11:11.916517Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-02-19T08:11:11.916571Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:11:11.918375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:11.920379Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:11.920429Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:11.920627Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:11:11.920842Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:11.920891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-02-19T08:11:11.920941Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 
281474976710758, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-02-19T08:11:11.922189Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:11.922307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:11.922347Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-02-19T08:11:11.922415Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:11:11.922471Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:11:11.923070Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:11.923164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:11.923201Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-02-19T08:11:11.923248Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:11:11.923283Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:11:11.923384Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-02-19T08:11:11.923459Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-02-19T08:11:11.924226Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:11:11.924311Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:11:11.924413Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:11.927114Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:11.928719Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:11.928882Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-02-19T08:11:11.928980Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710758 2026-02-19T08:11:11.929810Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:11:11.931375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:11.931439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:905:2846]
TestWaitNotification: OK eventTxId 102
>> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system [GOOD]
>> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system
|89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceHttpOk [GOOD]
Test command err: 2026-02-19T08:11:07.096633Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485890175032632:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:07.096707Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:07.295160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:07.332263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:07.332351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:07.388285Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:07.393874Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485890175032596:2081] 1771488667095214 != 1771488667095217 2026-02-19T08:11:07.402177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:07.525927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:07.525960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2026-02-19T08:11:07.525970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:07.526077Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:07.535413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:07.739657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:07.796603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:07.810138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:07.815053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480
|89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut
|89.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:33.216204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:33.216296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:33.216385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:33.216422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2026-02-19T08:10:33.216455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:33.216488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:33.216517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:33.216547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:33.216603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:33.216701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:33.217322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:33.217590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:33.341835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:33.341912Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:33.342826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:33.352188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:33.352270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:33.352437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:33.366310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:33.369999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:33.370837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:33.371230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:33.386262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:33.386464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:33.390276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:33.390359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:33.390614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:33.390671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:33.390740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:33.391116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.398404Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:33.531955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:33.532246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.532486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:33.532534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:33.532789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:33.532885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:33.535187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:33.535421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:33.535619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.535701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:33.535747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2026-02-19T08:10:33.535802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:33.537918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.537983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:33.538044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:33.542159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.542215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.542299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:33.542348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:33.546015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:33.547918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:33.548120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:33.548954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:33.549103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:33.549164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:33.549433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:33.549474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2026-02-19T08:10:33.549601Z node 1 :FLAT_TX_SCH ... as 2 2026-02-19T08:11:12.617485Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-02-19T08:11:12.620834Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-02-19T08:11:12.620921Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-02-19T08:11:12.620955Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2026-02-19T08:11:12.620987Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-02-19T08:11:12.626725Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:11:12.626870Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:11:12.626923Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:11:12.626965Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-02-19T08:11:12.627034Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-02-19T08:11:12.628239Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:11:12.628352Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:11:12.628388Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:11:12.628423Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-02-19T08:11:12.628459Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-02-19T08:11:12.629570Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:11:12.629661Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:11:12.629694Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:11:12.629749Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2026-02-19T08:11:12.629783Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-02-19T08:11:12.635152Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:11:12.635303Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:11:12.635354Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:11:12.635401Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-02-19T08:11:12.635458Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2026-02-19T08:11:12.635599Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-02-19T08:11:12.642084Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:11:12.642440Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:11:12.642845Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:11:12.644477Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-02-19T08:11:12.646284Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-02-19T08:11:12.646336Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 
202 2026-02-19T08:11:12.648261Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-02-19T08:11:12.648392Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-02-19T08:11:12.648435Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [13:2733:4720] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-02-19T08:11:12.650111Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-02-19T08:11:12.650159Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-02-19T08:11:12.650251Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-02-19T08:11:12.650283Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-02-19T08:11:12.650351Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-02-19T08:11:12.650399Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-02-19T08:11:12.650499Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-02-19T08:11:12.650531Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-02-19T08:11:12.650616Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-02-19T08:11:12.650647Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-02-19T08:11:12.653053Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-02-19T08:11:12.653240Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-02-19T08:11:12.653364Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-02-19T08:11:12.653408Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [13:2736:4723] 2026-02-19T08:11:12.653622Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-02-19T08:11:12.654260Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-02-19T08:11:12.654307Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [13:2736:4723] 2026-02-19T08:11:12.654522Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, 
txId: 200, at schemeshard: 72057594046678944 2026-02-19T08:11:12.654627Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-02-19T08:11:12.654662Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [13:2736:4723] 2026-02-19T08:11:12.654794Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-02-19T08:11:12.654937Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-02-19T08:11:12.654970Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [13:2736:4723] 2026-02-19T08:11:12.655161Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-02-19T08:11:12.655194Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [13:2736:4723] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/fulltext/ydb-core-kqp-ut-indexes-fulltext |89.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/fulltext/ydb-core-kqp-ut-indexes-fulltext |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/fulltext/ydb-core-kqp-ut-indexes-fulltext >> LdapAuthProviderTests::CanRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTests::LdapServerIsUnavailable >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeStartTls >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin >> LdapAuthProviderTests::CanNotFetchGroupsWithInvalidRobotUserLogin |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |89.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin >> ActorPage::NoValidGroupForbidden [GOOD] >> ActorPage::OptionsNoContent |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |89.9%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> ActorPage::InvalidTokenForbidden [GOOD] >> ActorPage::NoUseAuthOk >> ActorHandler::InvalidTokenForbidden [GOOD] |89.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser >> TExportToS3Tests::ShouldCheckQuotasExportsLimited >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> LdapAuthProviderTests::CanFetchGroupsWithCustomGroupAttribute [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> LdapAuthProviderTests::CanFetchGroupsWithValidCredentialsUseExternalSaslAuth [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost [GOOD] >> TMemoryStatsAggregator::Compaction_Single [GOOD] >> TStateStorageConfig::TestMultiReplicaFailDomains >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |89.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055 folder_id=folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055 iam_token=t1.prefix_of_token.super_mega_well_formed_iam_token cloud_account=acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055 2026-02-19T08:10:25.337084Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055]","tx_id":"281474976720693","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:25.476520Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055/0000000000000001029d]","tx_id":"281474976720699","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:25.517150Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055/0000000000000001029d/v2]","tx_id":"281474976720700","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:25.608558Z: {"request_id":"66e133b6-d66959be-7e81c730-787f37a3","cloud_id":"CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055"} ======================================== 2026-02-19T08:10:26.072820Z: {"request_id":"66e133b6-d66959be-7e81c730-787f37a3","permission":"ymq.queues.create","id":"8109100011203863784$CreateMessageQueue$2026-02-19T08:10:26.072631Z","idempotency_id":"8109100011203863784$CreateMessageQueue$2026-02-19T08:10:25.411000Z","cloud_id":"CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","masked_token":"t1.prefix_of_token.**** (2764EAA0)","auth_type":"service_account","remote_address":"::1","created_at":"2026-02-19T08:10:25.411000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_718eec67-0d6a-11f1-83c5-d00d34a5b055.fifo","resource_id":"0000000000000001029d","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","component":"ymq"} ======================================== 2026-02-19T08:10:26.979076Z: {"request_id":"f2e5e9b6-c4964bc2-ef3f6369-cb0f6505","permission":"ymq.queues.setAttributes","id":"12814529322783809543$UpdateMessageQueue$2026-02-19T08:10:26.978887Z","idempotency_id":"12814529322783809543$UpdateMessageQueue$2026-02-19T08:10:26.743000Z","cloud_id":"CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","masked_token":"t1.prefix_of_token.**** (2764EAA0)","auth_type":"service_account","remote_address":"::1","created_at":"2026-02-19T08:10:26.743000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_718eec67-0d6a-11f1-83c5-d00d34a5b055.fifo","resource_id":"0000000000000001029d","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","component":"ymq"} ======================================== 2026-02-19T08:10:28.349170Z: 
{"request_id":"b255772-fe71c4aa-9f1c6b81-39dd9322","permission":"ymq.queues.setAttributes","id":"13087179532305168551$UpdateMessageQueue$2026-02-19T08:10:28.348994Z","idempotency_id":"13087179532305168551$UpdateMessageQueue$2026-02-19T08:10:27.857000Z","cloud_id":"CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","masked_token":"t1.prefix_of_token.**** (2764EAA0)","auth_type":"service_account","remote_address":"::1","created_at":"2026-02-19T08:10:27.857000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_718eec67-0d6a-11f1-83c5-d00d34a5b055.fifo","resource_id":"0000000000000001029d","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","component":"ymq"} ======================================== 2026-02-19T08:10:29.112811Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055/0000000000000001029d/v2]","tx_id":"281474976720714","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:29.145678Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055/0000000000000001029d]","tx_id":"281474976720715","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:29.177744Z: {"request_id":"46ccd1a1-50654c5d-d3593651-21216d4f","cloud_id":"CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","subject":"fake_user_sid@as","queue":"0000000000000001029d","resource_id":"0000000000000001029d","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055"} ======================================== 2026-02-19T08:10:29.837159Z: {"request_id":"46ccd1a1-50654c5d-d3593651-21216d4f","permission":"ymq.queues.delete","id":"3187269954968897477$DeleteMessageQueue$2026-02-19T08:10:29.837024Z","idempotency_id":"3187269954968897477$DeleteMessageQueue$2026-02-19T08:10:29.036000Z","cloud_id":"CLOUD_FOR_folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","masked_token":"t1.prefix_of_token.**** (2764EAA0)","auth_type":"service_account","remote_address":"::1","created_at":"2026-02-19T08:10:29.036000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_718eec67-0d6a-11f1-83c5-d00d34a5b055.fifo","resource_id":"0000000000000001029d","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_71bdf7bf-0d6a-11f1-8182-d00d34a5b055","component":"ymq"} ======================================== ======================================== run test with cloud_id=CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055 folder_id=folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055 iam_token=t1.prefix_of_token.super_mega_well_formed_iam_token cloud_account=acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055 2026-02-19T08:10:41.707363Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055]","tx_id":"281474976720730","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:41.813506Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055/000000000000000307id]","tx_id":"281474976720736","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:41.848912Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055/000000000000000307id/v4]","tx_id":"281474976720737","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:41.932231Z: {"request_id":"e1858e85-b58c2d4d-b48859bb-add6f054","cloud_id":"CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055"} ======================================== 2026-02-19T08:10:41.940297Z: {"request_id":"e1858e85-b58c2d4d-b48859bb-add6f054","permission":"ymq.queues.create","id":"17387082685119533160$CreateMessageQueue$2026-02-19T08:10:41.940027Z","idempotency_id":"17387082685119533160$CreateMessageQueue$2026-02-19T08:10:41.758000Z","cloud_id":"CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","masked_token":"t1.prefix_of_token.**** (2764EAA0)","auth_type":"service_account","remote_address":"::1","created_at":"2026-02-19T08:10:41.758000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_7b66974a-0d6a-11f1-9981-d00d34a5b055","resource_id":"000000000000000307id","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","component":"ymq"} ======================================== 2026-02-19T08:10:43.958048Z: {"request_id":"99b08e50-5e24304a-eba9e140-54d99af7","permission":"ymq.queues.setAttributes","id":"14324487803792033028$UpdateMessageQueue$2026-02-19T08:10:43.957870Z","idempotency_id":"14324487803792033028$UpdateMessageQueue$2026-02-19T08:10:43.028000Z","cloud_id":"CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","masked_token":"t1.prefix_of_token.**** (2764EAA0)","auth_type":"service_account","remote_address":"::1","created_at":"2026-02-19T08:10:43.028000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_7b66974a-0d6a-11f1-9981-d00d34a5b055","resource_id":"000000000000000307id","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","component":"ymq"} ======================================== 2026-02-19T08:10:44.482527Z: 
{"request_id":"4d7fd375-bc120dc9-ea17e13f-5db23420","permission":"ymq.queues.setAttributes","id":"13406128523747731305$UpdateMessageQueue$2026-02-19T08:10:44.482330Z","idempotency_id":"13406128523747731305$UpdateMessageQueue$2026-02-19T08:10:44.110000Z","cloud_id":"CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","masked_token":"t1.prefix_of_token.**** (2764EAA0)","auth_type":"service_account","remote_address":"::1","created_at":"2026-02-19T08:10:44.110000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_7b66974a-0d6a-11f1-9981-d00d34a5b055","resource_id":"000000000000000307id","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","component":"ymq"} ======================================== 2026-02-19T08:10:45.201425Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055/000000000000000307id/v4]","tx_id":"281474976720758","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:45.219448Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055/000000000000000307id]","tx_id":"281474976720759","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-02-19T08:10:45.232840Z: {"request_id":"b3ce2f76-26227b7c-fd1598eb-cb356551","cloud_id":"CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","subject":"fake_user_sid@as","queue":"000000000000000307id","resource_id":"000000000000000307id","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055"} ======================================== 2026-02-19T08:10:45.975622Z: {"request_id":"b3ce2f76-26227b7c-fd1598eb-cb356551","permission":"ymq.queues.delete","id":"2139060350304850734$DeleteMessageQueue$2026-02-19T08:10:45.975459Z","idempotency_id":"2139060350304850734$DeleteMessageQueue$2026-02-19T08:10:45.155000Z","cloud_id":"CLOUD_FOR_folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","masked_token":"t1.prefix_of_token.**** (2764EAA0)","auth_type":"service_account","remote_address":"::1","created_at":"2026-02-19T08:10:45.155000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_7b66974a-0d6a-11f1-9981-d00d34a5b055","resource_id":"000000000000000307id","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_7b88afe5-0d6a-11f1-91d0-d00d34a5b055","component":"ymq"} ======================================== ======================================== |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeStartTls [GOOD] >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeNonSecure >> LdapAuthProviderTests::LdapServerIsUnavailable [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::InvalidTokenForbidden [GOOD] Test command err: 2026-02-19T08:11:04.126803Z 
node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485878191739765:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:04.128015Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:04.385805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:04.409638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:04.409777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:04.481758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:04.487145Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:04.519323Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485878191739733:2081] 1771488664124865 != 1771488664124868 2026-02-19T08:11:04.595007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:04.637250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:04.637273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:04.637285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:04.637378Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:04.853739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:04.908317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:04.912821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:10.075749Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485903192100426:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:10.075960Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:10.091373Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:10.191080Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:10.196423Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485903192100398:2081] 1771488670074587 != 1771488670074590 2026-02-19T08:11:10.231330Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:10.231435Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:10.242576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:10.339512Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:10.393149Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:10.393197Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:10.393208Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:10.393301Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:10.725218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:10.774853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:10.786460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) >> Path::CanonizedStringIsSame2 [GOOD] >> Path::Name_EnglishAlphabet [GOOD] >> Path::Name_RussianAlphabet [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C >> Path::Name_RussianAlphabet_SetLocale_C [GOOD] >> 
Path::Name_RussianAlphabet_SetLocale_C_UTF8 [GOOD] >> Path::Name_AllSymbols [GOOD] >> Path::Name_ExtraSymbols [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTests::CanFetchGroupsWithCustomGroupAttribute [GOOD] Test command err: 2026-02-19T08:11:03.455108Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485876429539858:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:03.455224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:03.709014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:03.709118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:03.727072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:03.743081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:03.773756Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:03.774186Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485876429539828:2081] 1771488663453699 != 1771488663453702 2026-02-19T08:11:03.871985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:03.872006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:03.872022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:03.872178Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:03.952519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:04.065849Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:04.066690Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:04.066721Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:04.067944Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldaps, uris: ldaps://127.0.0.1:12703, port: 12703 2026-02-19T08:11:04.068009Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:04.126325Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:04.171430Z node 1 :LDAP_AUTH_PROVIDER DEBUG: 
ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:04.171954Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:04.172019Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:04.218119Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:04.273929Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:04.276605Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****8vAw (4809DAB2) () has now valid token of ldapuser@ldap 2026-02-19T08:11:06.822568Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485889048379883:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:06.822652Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:06.845046Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:06.906623Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:06.907790Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485889048379844:2081] 1771488666821577 != 1771488666821580 2026-02-19T08:11:06.949454Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:06.949555Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:06.952505Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:07.010013Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:07.010040Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:07.010047Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:07.010137Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:07.015309Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:07.131177Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:07.134685Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:07.134755Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:07.135483Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:27684, port: 27684 2026-02-19T08:11:07.135605Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:07.136371Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:07.182276Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****KFOA (EFA24862) () has now valid token of ldapuser@ldap 2026-02-19T08:11:10.373850Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608485905675638312:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:10.375860Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:10.409504Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:10.480717Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:10.480794Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:10.481745Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:10.483382Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608485905675638251:2081] 1771488670360318 != 1771488670360321 2026-02-19T08:11:10.511822Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:10.580196Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:10.586347Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:10.586370Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:10.586378Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:10.586458Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:10.725873Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:10.729114Z node 
3 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:10.729147Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:10.729860Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:28076, port: 28076 2026-02-19T08:11:10.729961Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:10.730574Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:10.778015Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:10.778454Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:10.778490Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:10.824858Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:10.869736Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:10.870496Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****ZtjQ (A05951C9) () has now valid token of ldapuser@ldap 2026-02-19T08:11:14.159341Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608485924074530332:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:14.160156Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:14.183077Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:14.273627Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:14.275839Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608485924074530284:2081] 1771488674156986 != 1771488674156989 2026-02-19T08:11:14.300122Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:14.300203Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:14.310296Z 
node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:14.397143Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:14.416538Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:14.416570Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:14.416577Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:14.416648Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:14.563116Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:14.570925Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:14.570957Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:14.571673Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:19317, port: 19317 2026-02-19T08:11:14.571770Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:14.573188Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-02-19T08:11:14.618020Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:14.618643Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:14.618690Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-02-19T08:11:14.666492Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-02-19T08:11:14.710118Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-02-19T08:11:14.710963Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****YOmQ (AD714597) () has now valid token of ldapuser@ldap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTests::CanFetchGroupsWithValidCredentialsUseExternalSaslAuth [GOOD] Test command err: 2026-02-19T08:11:03.277445Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485874855813884:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:03.277535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:03.463110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:03.482285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:03.482407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:03.541589Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:03.545860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:03.548838Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485874855813773:2081] 1771488663245086 != 1771488663245089 2026-02-19T08:11:03.658490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:03.658510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:03.658517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:03.658603Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:03.698273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:03.785256Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:03.788847Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:03.788878Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:03.789504Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:18900, port: 18900 2026-02-19T08:11:03.790074Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:03.790659Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:03.834915Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****dt1Q (5840C7D6) () has now valid token of ldapuser@ldap 2026-02-19T08:11:06.642371Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485888390643315:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:06.642484Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:06.657924Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:06.737768Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:06.741225Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485888390643285:2081] 1771488666641224 != 1771488666641227 2026-02-19T08:11:06.780954Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:06.781038Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:06.784135Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:06.823227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:06.839912Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:06.839928Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:06.839932Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:06.839998Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:06.928425Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:06.932200Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:06.932247Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:06.932944Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:61666, port: 61666 2026-02-19T08:11:06.933041Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:06.933687Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-02-19T08:11:06.982278Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****HeSw (74BCB9B1) () has now valid token of ldapuser@ldap 2026-02-19T08:11:09.963053Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608485899912729851:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:09.963122Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:09.980652Z node 3 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:10.037697Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:10.039566Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608485899912729821:2081] 1771488669962025 != 1771488669962028 2026-02-19T08:11:10.048248Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:10.048341Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:10.073909Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:10.182287Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:10.182327Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:10.182336Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:10.182412Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:10.191091Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:10.314641Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:10.314677Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:10.314745Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:875: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****eN7Q (4D614C44)) 2026-02-19T08:11:10.970076Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:12.969861Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket eyJh****eN7Q (4D614C44) () has now permanent error message 'Login state is not available' 2026-02-19T08:11:12.969926Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:2360: Finish waiting for login providers for 1 databases: /Root, 2026-02-19T08:11:14.033906Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:14.033993Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:14.107883Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:14.109966Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608485917646584581:2081] 1771488673889341 != 1771488673889344 
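The LDAP group lookups traced in these test logs use two filter shapes: a single "matching rule in chain" query (member:1.2.840.113556.1.4.1941:=<user DN>) that asks the server to resolve nested membership in one pass, and, for the tree-traversal fallback, a per-level OR filter over the parent groups' DNs ((|(entryDn=...)(entryDn=...))). Below is a minimal, self-contained C++ sketch of how such filter strings can be composed. It is illustrative only and is not the ldap_auth_provider.cpp implementation; the function names and the plain string-concatenation approach are assumptions made for the example.

// Standalone sketch (not the real ldap_auth_provider.cpp code): composing the
// two kinds of LDAP search filters visible in the log above. Names here are
// illustrative only.
#include <iostream>
#include <string>
#include <vector>

// Extensible-match filter using LDAP_MATCHING_RULE_IN_CHAIN
// (OID 1.2.840.113556.1.4.1941): matches every group the user belongs to,
// including transitively nested ones, in a single query.
std::string MakeMatchingRuleInChainFilter(const std::string& memberAttr,
                                          const std::string& userDn) {
    return "(" + memberAttr + ":1.2.840.113556.1.4.1941:=" + userDn + ")";
}

// Filter used by the tree-traversal fallback: one search per nesting level,
// OR-ing the DNs of the groups discovered on the previous level.
std::string MakeParentGroupsFilter(const std::vector<std::string>& groupDns) {
    std::string filter = "(|";
    for (const auto& dn : groupDns) {
        filter += "(entryDn=" + dn + ")";
    }
    filter += ")";
    return filter;
}

int main() {
    std::cout << MakeMatchingRuleInChainFilter(
                     "member", "uid=ldapuser,dc=search,dc=yandex,dc=net")
              << "\n";
    std::cout << MakeParentGroupsFilter(
                     {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
                      "cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"})
              << "\n";
}

With the DNs taken from this log, the second helper reproduces exactly the (|(entryDn=cn=managers,...)(entryDn=cn=developers,...)) filter seen in the traversal entries above; the first reproduces the (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net) query that precedes them.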
2026-02-19T08:11:14.120690Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:14.120787Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:14.148364Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:14.224578Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:14.224616Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:14.224622Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:14.224704Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:14.267722Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:14.380776Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:14.384191Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:14.384221Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:14.384882Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldaps, uris: ldaps://127.0.0.1:14902, port: 14902 2026-02-19T08:11:14.384944Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:196: bind: Sasl EXTERNAL verify: ok=1 depth=1 err=0 (ok) subject=/C=RU/ST=MSK/L=MSK/O=YA/OU=UtTest/CN=testCA verify: ok=1 depth=0 err=0 (ok) subject=/C=RU/ST=MSK/L=MSK/O=YA/OU=UtTest/CN=localhost 2026-02-19T08:11:14.450275Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:14.498755Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:14.499355Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:14.499408Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:14.546162Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:14.590189Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:14.591434Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****AfSg (5B12BDD6) () has now valid token of ldapuser@ldap >> LdapAuthProviderTests::CanNotFetchGroupsWithInvalidRobotUserLogin [GOOD] >> LdapAuthProviderTests::CanNotFetchGroupsWithInvalidRobotUserPassword >> NFulltext::Analyze [GOOD] >> NFulltext::AnalyzeRu [GOOD] >> NFulltext::AnalyzeInvalid [GOOD] >> NFulltext::AnalyzeFilterLength [GOOD] >> NFulltext::AnalyzeFilterLengthRu [GOOD] >> NFulltext::AnalyzeFilterNgram [GOOD] >> NFulltext::AnalyzeFilterSnowball [GOOD] >> NFulltext::BuildNgramsUtf8 [GOOD] >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> test_analyze.py::test_basic [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_ExtraSymbols [GOOD] >> ActorHandler::OptionsNoContent [GOOD] >> ActorPage::HttpOk >> ActorHandler::NoValidGroupForbidden [GOOD] >> ActorHandler::NoUseAuthOk >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited >> TBlobStorageGroupTypeTest::OutputInfoAboutErasureSpecies [GOOD] >> TGuardianImpl::FollowerTracker [GOOD] >> TGuardianImpl::FollowerTrackerDuplicates [GOOD] >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] >> TLogoBlobIdHashTest::SimpleTest [GOOD] >> Path::Name_WeirdLocale_RegularName [GOOD] >> Path::Name_WeirdLocale_WeirdName [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> NFulltext::BuildNgramsUtf8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] Test command err: 2026-02-19T08:11:03.861482Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485874593321230:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:03.861595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:04.095387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:04.119256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:04.119378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:04.182118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:04.185788Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485874593321091:2081] 1771488663854685 != 1771488663854688 2026-02-19T08:11:04.200972Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:04.270379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-02-19T08:11:04.281471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:04.281522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:04.281536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:04.281645Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:04.456325Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:04.459652Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:04.459707Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:04.460403Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:62306, port: 62306 2026-02-19T08:11:04.460917Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:04.461614Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:04.461907Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:353: Could not perform search for filter uid=ldapuser on server ldap://127.0.0.1:62306. Server is busy 2026-02-19T08:11:04.462532Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****z1Rw (FF5DE419) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://127.0.0.1:62306. Server is busy)' 2026-02-19T08:11:04.462825Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:04.462852Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:04.463685Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:62306, port: 62306 2026-02-19T08:11:04.463770Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:04.464284Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:04.464546Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:353: Could not perform search for filter uid=ldapuser on server ldap://127.0.0.1:62306. Server is busy 2026-02-19T08:11:04.464745Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****z1Rw (FF5DE419) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://127.0.0.1:62306. 
Server is busy)' 2026-02-19T08:11:04.867593Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:05.861970Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****z1Rw (FF5DE419) 2026-02-19T08:11:05.862300Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:05.862316Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:05.863658Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:62306, port: 62306 2026-02-19T08:11:05.863792Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:05.869907Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:05.870304Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:353: Could not perform search for filter uid=ldapuser on server ldap://127.0.0.1:62306. Server is busy 2026-02-19T08:11:05.870882Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****z1Rw (FF5DE419) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://127.0.0.1:62306. Server is busy)' 2026-02-19T08:11:08.861137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485874593321230:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:08.861222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:08.864693Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****z1Rw (FF5DE419) 2026-02-19T08:11:08.864924Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:08.864952Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:08.865563Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:62306, port: 62306 2026-02-19T08:11:08.865680Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:08.866264Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:08.914045Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:08.914527Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:08.914587Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:08.958104Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:09.006105Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:09.006930Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****z1Rw (FF5DE419) () has now valid token of ldapuser@ldap 2026-02-19T08:11:12.870343Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****z1Rw (FF5DE419) 2026-02-19T08:11:12.870431Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:62306, port: 62306 2026-02-19T08:11:12.870514Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:12.871680Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:12.914018Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:12.914474Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:12.914528Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:12.958038Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:13.002053Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:13.002732Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****z1Rw (FF5DE419) () has now valid token of ldapuser@ldap 2026-02-19T08:11:15.315057Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485926733497828:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:15.316091Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:15.319898Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:15.398105Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:15.400615Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485926733497796:2081] 1771488675297311 != 1771488675297314 2026-02-19T08:11:15.410843Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:15.422303Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:15.422395Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:15.436048Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:15.559489Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:15.559513Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:15.559521Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:15.559602Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:15.660521Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:15.725562Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:15.726019Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:15.726049Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:15.726725Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://unavailablehost:7670, port: 7670 2026-02-19T08:11:15.726783Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-02-19T08:11:15.733768Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. Can't contact LDAP server 2026-02-19T08:11:15.733939Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****AO1Q (10076380) () has now retryable error message 'Could not login via LDAP (Could not start TLS. 
Can't contact LDAP server)' 2026-02-19T08:11:15.734189Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:15.734224Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:15.735006Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://unavailablehost:7670, port: 7670 2026-02-19T08:11:15.735063Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-02-19T08:11:15.739637Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. Can't contact LDAP server 2026-02-19T08:11:15.739776Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****AO1Q (10076380) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system >> MonPage::HttpOk [GOOD] >> MonPage::OptionsNoContent >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_WeirdLocale_WeirdName [GOOD] >> Cdc::UuidExchange[PqRunner] >> Other::UnknownPathNotFound [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system >> Cdc::NewAndOldImagesLog[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:11:18.378678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:11:18.378794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:11:18.378862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:11:18.378898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:11:18.378942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:11:18.378978Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:11:18.379020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:11:18.379052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:11:18.379128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:11:18.379191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:11:18.380085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:11:18.380359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:11:18.473239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:11:18.473300Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:18.492632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:11:18.493417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:11:18.493631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:11:18.510860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:11:18.512260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:11:18.512977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:18.513289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:11:18.516943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:18.517120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:11:18.518329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:18.518388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:18.518485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:11:18.518530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:18.518580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:11:18.518783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:11:18.681737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.682713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.682830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.682904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.682978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.683656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
as 5 REQUEST: PUT /data_01.csv HTTP/1.1 HEADERS: Host: localhost:18692 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 668B1864-1CD6-4816-B550-42D1FC068B43 amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /data_01.csv / / 11 2026-02-19T08:11:19.718865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:11:19.719475Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:881:2837], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } 2026-02-19T08:11:19.721663Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:884:2839], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2026-02-19T08:11:19.721749Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:884:2839], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-02-19T08:11:19.721910Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:883:2836], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:18692 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 4058A33F-5CB4-4A6C-BD24-BD3F65B9AC90 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2026-02-19T08:11:19.729616Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:881:2837], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-02-19T08:11:19.730111Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:880:2835] 2026-02-19T08:11:19.730219Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:881:2837], sender# [1:880:2835], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:18692 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: DDE0D1C5-F727-4164-8F03-4AF9FFA71CD6 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2026-02-19T08:11:19.742441Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:881:2837], result# PutObjectResult { ETag: 
6e3e0a41fdab8add833862f1bd2954c3 } 2026-02-19T08:11:19.742525Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:881:2837], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-02-19T08:11:19.743019Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:880:2835], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-02-19T08:11:19.750612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:11:19.775942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 721 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-02-19T08:11:19.776027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:11:19.776259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 721 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-02-19T08:11:19.776864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:303: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 721 RawX2: 4294969998 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-02-19T08:11:19.776973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:19.777346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:19.777884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 727 RawX2: 4294970001 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-02-19T08:11:19.777933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-02-19T08:11:19.778072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 727 RawX2: 4294970001 } Origin: 
72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-02-19T08:11:19.778194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:303: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 727 RawX2: 4294970001 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-02-19T08:11:19.778260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:19.778302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:11:19.778346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:11:19.778507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:11:19.778569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:11:19.778699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:19.781785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:11:19.781948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:11:19.782358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:11:19.782411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:11:19.782561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:11:19.782608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:11:19.782671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:11:19.782706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:11:19.782763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is 
published: true 2026-02-19T08:11:19.782844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:772:2737] message: TxId: 102 2026-02-19T08:11:19.782919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:11:19.782964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:11:19.783009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:11:19.783151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:11:19.785524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:19.785604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:857:2814] TestWaitNotification: OK eventTxId 102 >> YdbProxy::ReadTopic >> Cdc::KeysOnlyLog[PqRunner] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin >> TExportToS3Tests::SchemaMapping >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeNonSecure [GOOD] >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeUseListOfHosts >> KqpFulltextIndexes::DeleteRowReturning >> KqpFulltextIndexes::SelectWithFulltextMatch+UTF8 >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin >> KqpFulltextIndexes::SelectWithFulltextRelevanceB1FactorAndK1Factor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::UnknownPathNotFound [GOOD] Test command err: 2026-02-19T08:11:14.766291Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485923599898831:2255];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:14.766360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:15.081957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:15.112314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:15.112496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:15.168159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:15.172079Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were 
not loaded 2026-02-19T08:11:15.178319Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485923599898614:2081] 1771488674724378 != 1771488674724381 2026-02-19T08:11:15.288650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:15.333377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:15.333407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:15.333416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:15.333561Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:15.626038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:15.699834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:15.714311Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:15.717843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) >> YdbProxy::CopyTable >> LdapAuthProviderTests::CanNotFetchGroupsWithInvalidRobotUserPassword [GOOD] >> LdapAuthProviderTests::CanNotFetchGroupsUseInvalidSearchFilter >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> KqpService::CloseSessionDuringNonFinalCleanup >> ActorPage::OptionsNoContent [GOOD] >> LdapAuthProviderTests::CanRefreshGroupsInfoWithDisabledNestedGroups [GOOD] >> LdapAuthProviderTests::CanNotRequestWithEmptyHost >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> TExportToS3Tests::CancelledExportEndTime >> ActorPage::NoUseAuthOk [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin >> TExportToS3Tests::SchemaMapping [GOOD] >> TExportToS3Tests::SchemaMappingEncryption >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::OptionsNoContent [GOOD] Test command 
err: 2026-02-19T08:11:10.598153Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485903442379664:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:10.599025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:10.880497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:10.912378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:10.912520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:11.019545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:11.066512Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:11.067947Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485903442379622:2081] 1771488670593284 != 1771488670593287 2026-02-19T08:11:11.084377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:11.186232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:11.186264Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:11.186273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:11.188557Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:11.516835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:11.527055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:11.583507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:11.596106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:11.604719Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:17.195765Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485934187942482:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:17.196101Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:17.279793Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:17.425878Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:17.429941Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485934187942433:2081] 1771488677193797 != 1771488677193800 2026-02-19T08:11:17.449306Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:17.449435Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:17.467675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:17.548599Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:17.626541Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:17.626574Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:17.626584Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:17.626673Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:18.005849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:18.063653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:18.068493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:18.080028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:11:18.218323Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> EraseRowsTests::EraseRowsShouldSuccess >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::NoUseAuthOk [GOOD] Test command err: 2026-02-19T08:11:10.686531Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485903833046337:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:10.686679Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:10.933852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:10.951511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:10.951627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:11.036858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:11.063715Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:11.069918Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485903833046292:2081] 1771488670684174 != 1771488670684177 2026-02-19T08:11:11.197968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:11.252889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:11.252911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:11.252915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:11.253003Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:11.552839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:11.616752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:11.623758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:11.633956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:11:11.707275Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:17.591523Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485934930610639:2156];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:17.591659Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:17.614571Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:17.750042Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:17.751310Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:17.752482Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:17.752571Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:17.787480Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:17.916791Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:17.938793Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:17.938822Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:17.938836Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:17.938937Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-02-19T08:11:18.270442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:18.279088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:18.371182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:18.390193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:18.397853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:11:18.609104Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] >> LdapAuthProviderTests::CanFetchGroupsWithDefaultGroupAttributeUseListOfHosts [GOOD] >> LdapAuthProviderTests::CanFetchGroupsWithDelayUpdateSecurityState >> VDiskTest::HugeBlobWrite [GOOD] >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |89.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> TExportToS3Tests::Checksums >> ActorPage::HttpOk [GOOD] >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [FAIL] >> ActorHandler::NoUseAuthOk [GOOD] >> LdapAuthProviderTests::CanNotFetchGroupsUseInvalidSearchFilter [GOOD] >> LdapAuthProviderTests::CanNotFetchGroupsOverSaslExternalWithoutClientCert >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> 
MonPage::OptionsNoContent [GOOD] >> LdapAuthProviderTests::CanNotRequestWithEmptyHost [GOOD] >> LdapAuthProviderTests::CanNotRequestWithEmptyBindPassword >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> TExportToS3Tests::Checksums [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::HttpOk [GOOD] Test command err: 2026-02-19T08:11:13.039182Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485917472538411:2102];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:13.039855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:13.655538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:13.682007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:13.682117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:13.687421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:13.729685Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:13.733888Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485913177571032:2081] 1771488673022999 != 1771488673023002 2026-02-19T08:11:13.941929Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:14.052352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:14.052374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:14.052392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:14.052498Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:14.052800Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:14.340486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:14.433998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:14.439827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:20.335419Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485947429791412:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:20.335493Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:20.397865Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:20.510342Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:20.510434Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:20.511601Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485947429791381:2081] 1771488680332909 != 1771488680332912 2026-02-19T08:11:20.527492Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:20.543546Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:20.632918Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:20.714497Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:20.714523Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:20.714532Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:20.714636Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:21.015436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:21.023005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:21.104808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:21.112123Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:21.127143Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobWrite [GOOD] Test command err: RandomSeed# 8235524283139264356 Put id# [42:1:1:0:0:20616:1] totalSize# 20616 blobs# 1 hash# 801283063445698868 Put id# [70:1:1:0:0:108285:1] totalSize# 128901 blobs# 2 hash# 15370859521001171259 Put id# [13:1:1:0:0:376743:1] totalSize# 505644 blobs# 3 hash# 9671070217728005928 Put id# [15:1:1:0:0:393278:1] totalSize# 898922 blobs# 4 hash# 2350268893855050491 Put id# [33:1:1:0:0:82241:1] totalSize# 981163 blobs# 5 hash# 16436771422991534562 Put id# [35:1:1:0:0:639670:1] totalSize# 1620833 blobs# 6 hash# 17464335760553897859 Put id# [78:1:1:0:0:409418:1] totalSize# 2030251 blobs# 7 hash# 2189240089568224482 Trim Put id# [98:1:1:0:0:382055:1] totalSize# 2412306 blobs# 8 hash# 6044952863754657589 Change MinHugeBlobSize# 262144 Put id# [7:1:1:0:0:140767:1] totalSize# 2553073 blobs# 9 hash# 2670507440986870131 Put id# [65:1:1:0:0:495650:1] totalSize# 3048723 blobs# 10 hash# 16488113932764856511 Put id# [18:1:1:0:0:281298:1] totalSize# 3330021 blobs# 11 hash# 16291530122582159951 Put id# [61:1:1:0:0:286099:1] totalSize# 3616120 blobs# 12 hash# 11065764169230212280 Put id# [13:1:2:0:0:419150:1] totalSize# 4035270 blobs# 13 hash# 5672238805406680566 Put id# [34:1:1:0:0:578941:1] totalSize# 4614211 blobs# 14 hash# 5517564490423093964 Trim Put id# [19:1:1:0:0:391748:1] totalSize# 5005959 blobs# 15 hash# 17581398867282086907 Put id# [57:1:1:0:0:575752:1] totalSize# 5581711 blobs# 16 hash# 14706823718558074079 Trim Put id# [9:1:1:0:0:267082:1] totalSize# 5848793 blobs# 17 hash# 11611485635351430152 Put id# [92:1:1:0:0:439954:1] totalSize# 6288747 blobs# 18 hash# 3866149910018501468 Put id# [75:1:1:0:0:66138:1] totalSize# 6354885 blobs# 19 hash# 9513937879278611517 Put id# [79:1:1:0:0:638123:1] totalSize# 6993008 blobs# 20 hash# 10539715665732704967 Compact Put id# [59:1:1:0:0:349053:1] totalSize# 7342061 blobs# 21 hash# 4048244556783300462 Put id# [63:1:1:0:0:72587:1] totalSize# 7414648 blobs# 22 hash# 15194351885532531073 Trim Put id# [51:1:1:0:0:60639:1] totalSize# 7475287 blobs# 23 hash# 9985975701663657082 Put id# [52:1:1:0:0:65389:1] totalSize# 7540676 blobs# 24 hash# 9526150870786407236 Put id# [20:1:1:0:0:627438:1] totalSize# 8168114 blobs# 25 hash# 13291752564539646939 Put id# [43:1:1:0:0:142663:1] totalSize# 8310777 blobs# 26 hash# 13376685902916057852 Put id# [75:1:2:0:0:468856:1] totalSize# 8779633 blobs# 27 hash# 15828077785299094541 Put id# [82:1:1:0:0:387203:1] totalSize# 9166836 blobs# 28 hash# 8205954133688822104 Put id# [52:1:2:0:0:594690:1] totalSize# 9761526 blobs# 29 hash# 170215242884491884 Put id# [89:1:1:0:0:284481:1] totalSize# 10046007 blobs# 30 hash# 10697126669683498492 Put id# [79:1:2:0:0:318423:1] 
totalSize# 10364430 blobs# 31 hash# 5601673165485779442 Put id# [56:1:1:0:0:336401:1] totalSize# 10700831 blobs# 32 hash# 5460571258329530220 Put id# [15:1:2:0:0:103569:1] totalSize# 10804400 blobs# 33 hash# 1657551724349813360 Put id# [25:1:1:0:0:594088:1] totalSize# 11398488 blobs# 34 hash# 16709651198230290947 Put id# [96:1:1:0:0:208804:1] totalSize# 11607292 blobs# 35 hash# 15253589789498846515 Trim Put id# [58:1:1:0:0:538030:1] totalSize# 12145322 blobs# 36 hash# 17948316028074867255 Put id# [100:1:1:0:0:601976:1] totalSize# 12747298 blobs# 37 hash# 15761051524946745449 Trim Put id# [85:1:1:0:0:18801:1] totalSize# 12766099 blobs# 38 hash# 10888198342549435591 Put id# [21:1:1:0:0:268393:1] totalSize# 13034492 blobs# 39 hash# 17741136556234247486 Put id# [68:1:1:0:0:23227:1] totalSize# 13057719 blobs# 40 hash# 1000273575451060137 Put id# [71:1:1:0:0:160459:1] totalSize# 13218178 blobs# 41 hash# 4026694170877401930 Put id# [92:1:2:0:0:397236:1] totalSize# 13615414 blobs# 42 hash# 15541712060629569280 Put id# [3:1:1:0:0:200404:1] totalSize# 13815818 blobs# 43 hash# 15436479452067423046 Trim Put id# [57:1:2:0:0:41868:1] totalSize# 13857686 blobs# 44 hash# 15400758942339878433 Put id# [6:1:1:0:0:57234:1] totalSize# 13914920 blobs# 45 hash# 3925498678026963262 Put id# [1:1:1:0:0:612586:1] totalSize# 14527506 blobs# 46 hash# 13316351132247660485 Put id# [61:1:2:0:0:560540:1] totalSize# 15088046 blobs# 47 hash# 12876571464341115438 Put id# [47:1:1:0:0:92823:1] totalSize# 15180869 blobs# 48 hash# 5884047830251171023 Put id# [50:1:1:0:0:394627:1] totalSize# 15575496 blobs# 49 hash# 10000078313891907945 Put id# [5:1:1:0:0:198428:1] totalSize# 15773924 blobs# 50 hash# 16500182957169688310 Put id# [26:1:1:0:0:227952:1] totalSize# 16001876 blobs# 51 hash# 13094957681675322882 Put id# [91:1:1:0:0:447550:1] totalSize# 16449426 blobs# 52 hash# 8056653545763042938 Put id# [77:1:1:0:0:369297:1] totalSize# 16818723 blobs# 53 hash# 11358950127000733769 Change MinHugeBlobSize# 32768 Put id# [3:1:2:0:0:592073:1] totalSize# 17410796 blobs# 54 hash# 1142522893986916717 Put id# [25:1:2:0:0:457007:1] totalSize# 17867803 blobs# 55 hash# 9423642573599689606 Put id# [18:1:2:0:0:286809:1] totalSize# 18154612 blobs# 56 hash# 3971881394384634819 Put id# [34:1:2:0:0:59166:1] totalSize# 18213778 blobs# 57 hash# 7125848736145768910 Put id# [79:1:3:0:0:219396:1] totalSize# 18433174 blobs# 58 hash# 12233967254576278689 Put id# [4:1:1:0:0:194870:1] totalSize# 18628044 blobs# 59 hash# 1855808129085360164 Put id# [71:1:2:0:0:140758:1] totalSize# 18768802 blobs# 60 hash# 6985737180631135101 Put id# [94:1:1:0:0:400271:1] totalSize# 19169073 blobs# 61 hash# 11161865376017653591 Put id# [82:1:2:0:0:444910:1] totalSize# 19613983 blobs# 62 hash# 1182308828135512812 Put id# [27:1:1:0:0:215026:1] totalSize# 19829009 blobs# 63 hash# 12960978748597121075 Put id# [7:1:2:0:0:79002:1] totalSize# 19908011 blobs# 64 hash# 4721513201159162879 Put id# [3:1:3:0:0:491384:1] totalSize# 20399395 blobs# 65 hash# 4401958315507204073 Change MinHugeBlobSize# 8192 Put id# [55:1:1:0:0:481827:1] totalSize# 20881222 blobs# 66 hash# 4802991784114073226 Put id# [48:1:1:0:0:191442:1] totalSize# 21072664 blobs# 67 hash# 9458923824556426260 Put id# [20:1:2:0:0:195601:1] totalSize# 21268265 blobs# 68 hash# 13283258733716961001 Put id# [20:1:3:0:0:208563:1] totalSize# 21476828 blobs# 69 hash# 2073723837205253842 Put id# [15:1:3:0:0:8549:1] totalSize# 21485377 blobs# 70 hash# 5332112190907439195 Put id# [6:1:2:0:0:416965:1] totalSize# 21902342 blobs# 71 hash# 
5206440051932571894 Put id# [40:1:1:0:0:259395:1] totalSize# 22161737 blobs# 72 hash# 2821551336787634274 Compact Put id# [65:1:2:0:0:299019:1] totalSize# 22460756 blobs# 73 hash# 15413438744819369987 Put id# [86:1:1:0:0:376912:1] totalSize# 22837668 blobs# 74 hash# 17047094892298211659 Put id# [93:1:1:0:0:167672:1] totalSize# 23005340 blobs# 75 hash# 7290417154937757560 Trim Put id# [1:1:2:0:0:95552:1] totalSize# 23100892 blobs# 76 hash# 2380498250923672443 Put id# [73:1:1:0:0:454070:1] totalSize# 23554962 blobs# 77 hash# 3930534692995498726 Put id# [83:1:1:0:0:402236:1] totalSize# 23957198 blobs# 78 hash# 6469036163354441045 Put id# [16:1:1:0:0:399491:1] totalSize# 24356689 blobs# 79 hash# 6693645486420651072 Put id# [22:1:1:0:0:584307:1] totalSize# 24940996 blobs# 80 hash# 2833485101995636214 Put id# [24:1:1:0:0:357837:1] totalSize# 25298833 blobs# 81 hash# 267355426291405884 Put id# [2:1:1:0:0:541607:1] totalSize# 25840440 blobs# 82 hash# 4563919059981101130 Trim Put id# [100:1:2:0:0:407217:1] totalSize# 26247657 blobs# 83 hash# 10839143960885371379 Put id# [84:1:1:0:0:382785:1] totalSize# 26630442 blobs# 84 hash# 16382307785510789357 Trim Put id# [21:1:2:0:0:332078:1] totalSize# 26962520 blobs# 85 hash# 12908018699148222710 Put id# [11:1:1:0:0:86342:1] totalSize# 27048862 blobs# 86 hash# 8955484688198194354 Put id# [65:1:3:0:0:583247:1] totalSize# 27632109 blobs# 87 hash# 3351321328549155078 Put id# [31:1:1:0:0:105934:1] totalSize# 27738043 blobs# 88 hash# 13510923346864437522 Put id# [97:1:1:0:0:114510:1] totalSize# 27852553 blobs# 89 hash# 9839382477288550579 Put id# [50:1:2:0:0:354564:1] totalSize# 28207117 blobs# 90 hash# 16821650611683271517 Trim Put id# [78:1:2:0:0:281137:1] totalSize# 28488254 blobs# 91 hash# 11318248244125003059 Trim Put id# [10:1:1:0:0:210298:1] totalSize# 28698552 blobs# 92 hash# 17641018328585630136 Put id# [40:1:2:0:0:574037:1] totalSize# 29272589 blobs# 93 hash# 13127362407970128886 Put id# [74:1:1:0:0:6834:1] totalSize# 29279423 blobs# 94 hash# 8636363802762066876 Put id# [19:1:2:0:0:219764:1] totalSize# 29499187 blobs# 95 hash# 14861591569953295857 Put id# [88:1:1:0:0:527970:1] totalSize# 30027157 blobs# 96 hash# 9576054964135256698 Put id# [8:1:1:0:0:229137:1] totalSize# 30256294 blobs# 97 hash# 13978483542831867040 Put id# [31:1:2:0:0:318552:1] totalSize# 30574846 blobs# 98 hash# 14715618168655825583 Trim Put id# [87:1:1:0:0:327816:1] totalSize# 30902662 blobs# 99 hash# 13765822714536093449 Put id# [34:1:3:0:0:333363:1] totalSize# 31236025 blobs# 100 hash# 18060287502753093453 Put id# [21:1:3:0:0:188211:1] totalSize# 31424236 blobs# 101 hash# 18229966035608658894 Put id# [77:1:2:0:0:73313:1] totalSize# 31497549 blobs# 102 hash# 12812062236901997429 Put id# [27:1:2:0:0:186196:1] totalSize# 31683745 blobs# 103 hash# 14703626872233652248 Put id# [84:1:2:0:0:231497:1] totalSize# 31915242 blobs# 104 hash# 14182187979693542536 Change MinHugeBlobSize# 262144 Put id# [80:1:1:0:0:460462:1] totalSize# 32375704 blobs# 105 hash# 15924017257314761817 Put id# [19:1:3:0:0:513404:1] totalSize# 32889108 blobs# 106 hash# 4926754751582669320 Put id# [9:1:2:0:0:490904:1] totalSize# 33380012 blobs# 107 hash# 13564709796829277941 Change MinHugeBlobSize# 12288 Put id# [86:1:2:0:0:590070:1] totalSize# 33970082 blobs# 108 hash# 15300004335110438509 Put id# [47:1:2:0:0:30876:1] totalSize# 34000958 blobs# 109 hash# 12080201567958070971 Put id# [15:1:4:0:0:383146:1] totalSize# 34384104 blobs# 110 hash# 16765410431603602831 Put id# [19:1:4:0:0:431475:1] totalSize# 34815579 blobs# 
111 hash# 2482641335666312849 Put id# [33:1:2:0:0:197248:1] totalSize# 35012827 blobs# 112 hash# 8036426599874514387 Put id# [13:1:3:0:0:623784:1] totalSize# 35636611 blobs# 113 hash# 11493068963281020601 Trim Put id# [51:1:2:0:0:169285:1] totalSize# 35805896 blobs# 114 hash# 14738183269425078167 Put id# [25:1:3:0:0:511649:1] totalSize# 36317545 blobs# 115 hash# 8541178690929671575 Put id# [82:1:3:0:0:31113:1] totalSize# 36348658 blobs# 116 hash# 13211295428868473232 Put id# [61:1:3:0:0:615053:1] totalSize# 36963711 blobs# 117 hash# 16224603573112728679 Compact Put id# [62:1:1:0:0:13360:1] totalSize# 36977071 blobs# 118 hash# 15448193531639167986 Trim Put id# [55:1:2:0:0:574854:1] totalSize# 37551925 blobs# 119 hash# 12412444154508134185 Put id# [26:1:2:0:0:125768:1] totalSize# 37677693 blobs# 120 hash# 1216034369071432688 Put id# [94:1:2:0:0:569862:1] totalSize# 38247555 blobs# 121 hash# 13891149448889880318 Put id# [85:1:2:0:0:412548:1] totalSize# 38660103 blobs# 122 hash# 5333685225752446471 Put id# [80:1:2:0:0:34283:1] totalSize# 38694386 blobs# 123 hash# 4577139322284933024 Put id# [95:1:1:0:0:117185:1] totalSize# 38811571 blobs# 124 hash# 4860578392918079690 Put id# [20:1:4:0:0:218055:1] totalSize# 39029626 blobs# 125 hash# 6329532605569332900 Put id# [82:1:4:0:0:133454:1] totalSize# 39163080 blobs# 126 hash# 508251926020190453 Trim Put id# [79:1:4:0:0:168293:1] totalSize# 39331373 blobs# 127 hash# 5699208959068296516 Put id# [14:1:1:0:0:629594:1] totalSize# 39960967 blobs# 128 hash# 2323945444553353945 Put id# [51:1:3:0:0:132862:1] totalSize# 40093829 blobs# 129 hash# 1705361350042872443 Put id# [80:1:3:0:0:233162:1] totalSize# 40326991 blobs# 130 hash# 8070486033231847613 Trim Put id# [45:1:1:0:0:191411:1] totalSize# 40518402 blobs# 131 hash# 10502534797441164278 Put id# [89:1:2:0:0:587345:1] totalSize# 41105747 blobs# 132 hash# 12548546032193412659 Change MinHugeBlobSize# 524288 Trim Put id# [53:1:1:0:0:152710:1] totalSize# 41258457 blobs# 133 hash# 15176672267548377956 Put id# [78:1:3:0:0:208583:1] totalSize# 41467040 blobs# 134 hash# 3640859958077219859 Trim Put id# [19:1:5:0:0:463402:1] totalSize# 41930442 blobs# 135 hash# 8418895853404784950 Put id# [99:1:1:0:0:353954:1] totalSiz ... 
obSize# 98304 Trim Put id# [80:1:373:0:0:439039:1] totalSize# 11992797611 blobs# 36748 hash# 5814413032740808580 Put id# [73:1:399:0:0:45387:1] totalSize# 11992842998 blobs# 36749 hash# 2115162825511730134 Change MinHugeBlobSize# 32768 Put id# [41:1:375:0:0:635629:1] totalSize# 11993478627 blobs# 36750 hash# 57142042212170542 Put id# [70:1:328:0:0:164327:1] totalSize# 11993642954 blobs# 36751 hash# 11879387368919689552 Put id# [78:1:363:0:0:454309:1] totalSize# 11994097263 blobs# 36752 hash# 2888980960505700681 Put id# [81:1:373:0:0:40182:1] totalSize# 11994137445 blobs# 36753 hash# 13831339796745608581 Put id# [94:1:358:0:0:380000:1] totalSize# 11994517445 blobs# 36754 hash# 4718740763949378001 Change MinHugeBlobSize# 196608 Put id# [12:1:373:0:0:125782:1] totalSize# 11994643227 blobs# 36755 hash# 7983278662500179296 Trim Put id# [48:1:357:0:0:270140:1] totalSize# 11994913367 blobs# 36756 hash# 16773001492270444714 Put id# [87:1:376:0:0:409027:1] totalSize# 11995322394 blobs# 36757 hash# 10868403375677965700 Put id# [1:1:355:0:0:644232:1] totalSize# 11995966626 blobs# 36758 hash# 1584875940247291230 Put id# [99:1:413:0:0:561078:1] totalSize# 11996527704 blobs# 36759 hash# 16326942638176149131 Put id# [27:1:353:0:0:51207:1] totalSize# 11996578911 blobs# 36760 hash# 5967928413646245324 Trim Put id# [76:1:361:0:0:598737:1] totalSize# 11997177648 blobs# 36761 hash# 6561603931610515647 Put id# [57:1:328:0:0:45725:1] totalSize# 11997223373 blobs# 36762 hash# 16758517970382467163 Put id# [52:1:388:0:0:613094:1] totalSize# 11997836467 blobs# 36763 hash# 17387138053153045313 Put id# [93:1:370:0:0:109710:1] totalSize# 11997946177 blobs# 36764 hash# 8303144663462665397 Trim Put id# [85:1:422:0:0:387985:1] totalSize# 11998334162 blobs# 36765 hash# 15634829265929674404 Trim Put id# [78:1:364:0:0:320981:1] totalSize# 11998655143 blobs# 36766 hash# 15056893981595297386 Change MinHugeBlobSize# 131072 Put id# [39:1:361:0:0:354696:1] totalSize# 11999009839 blobs# 36767 hash# 8171995126022282611 Put id# [98:1:348:0:0:11511:1] totalSize# 11999021350 blobs# 36768 hash# 5173736575616284695 Put id# [49:1:396:0:0:142163:1] totalSize# 11999163513 blobs# 36769 hash# 17169992999303939040 Put id# [35:1:358:0:0:432453:1] totalSize# 11999595966 blobs# 36770 hash# 12248609283048640095 Put id# [23:1:383:0:0:511797:1] totalSize# 12000107763 blobs# 36771 hash# 5603815691603983877 Change MinHugeBlobSize# 196608 Put id# [89:1:360:0:0:302208:1] totalSize# 12000409971 blobs# 36772 hash# 7069753353169489352 Put id# [91:1:355:0:0:486093:1] totalSize# 12000896064 blobs# 36773 hash# 11147106222558386707 Put id# [74:1:379:0:0:236781:1] totalSize# 12001132845 blobs# 36774 hash# 17188169036568342349 Put id# [48:1:358:0:0:89798:1] totalSize# 12001222643 blobs# 36775 hash# 15990309122703393239 Put id# [20:1:363:0:0:648713:1] totalSize# 12001871356 blobs# 36776 hash# 11279401136550618662 Put id# [7:1:395:0:0:649678:1] totalSize# 12002521034 blobs# 36777 hash# 18202728855094615758 Put id# [7:1:396:0:0:652153:1] totalSize# 12003173187 blobs# 36778 hash# 13704555318878272531 Put id# [27:1:354:0:0:266558:1] totalSize# 12003439745 blobs# 36779 hash# 13489045451847052304 Put id# [41:1:376:0:0:37533:1] totalSize# 12003477278 blobs# 36780 hash# 17479128221974196051 Put id# [51:1:365:0:0:16962:1] totalSize# 12003494240 blobs# 36781 hash# 12596371107266108383 Put id# [48:1:359:0:0:40767:1] totalSize# 12003535007 blobs# 36782 hash# 7216361567535096216 Trim Put id# [69:1:328:0:0:292272:1] totalSize# 12003827279 blobs# 36783 hash# 
14372095926429126168 Change MinHugeBlobSize# 32768 Put id# [89:1:361:0:0:560302:1] totalSize# 12004387581 blobs# 36784 hash# 9013015033321116387 Trim Put id# [33:1:406:0:0:342650:1] totalSize# 12004730231 blobs# 36785 hash# 4570152929745895813 Trim Put id# [48:1:360:0:0:593836:1] totalSize# 12005324067 blobs# 36786 hash# 12024268782229767054 Put id# [24:1:363:0:0:360632:1] totalSize# 12005684699 blobs# 36787 hash# 15536676960285228725 Put id# [33:1:407:0:0:599204:1] totalSize# 12006283903 blobs# 36788 hash# 8454744704201578233 Put id# [100:1:356:0:0:235037:1] totalSize# 12006518940 blobs# 36789 hash# 11731957146031490975 Put id# [51:1:366:0:0:607717:1] totalSize# 12007126657 blobs# 36790 hash# 13025371846001984889 Put id# [55:1:349:0:0:308366:1] totalSize# 12007435023 blobs# 36791 hash# 13919367466336080439 Put id# [40:1:397:0:0:360793:1] totalSize# 12007795816 blobs# 36792 hash# 17444968553023826389 Put id# [7:1:397:0:0:284305:1] totalSize# 12008080121 blobs# 36793 hash# 901739898786090434 Put id# [50:1:372:0:0:607292:1] totalSize# 12008687413 blobs# 36794 hash# 5992881506702719586 Trim Put id# [100:1:357:0:0:557534:1] totalSize# 12009244947 blobs# 36795 hash# 1338556920523444523 Put id# [95:1:390:0:0:396313:1] totalSize# 12009641260 blobs# 36796 hash# 10060468703418131561 Put id# [81:1:374:0:0:78605:1] totalSize# 12009719865 blobs# 36797 hash# 9131643187181545105 Put id# [21:1:376:0:0:462871:1] totalSize# 12010182736 blobs# 36798 hash# 5970571110191271535 Trim Put id# [64:1:335:0:0:203026:1] totalSize# 12010385762 blobs# 36799 hash# 14876986501629626935 Put id# [44:1:364:0:0:236822:1] totalSize# 12010622584 blobs# 36800 hash# 7685291795274238086 Put id# [87:1:377:0:0:459149:1] totalSize# 12011081733 blobs# 36801 hash# 1674600045750603933 Put id# [94:1:359:0:0:447190:1] totalSize# 12011528923 blobs# 36802 hash# 13920019611629522454 Put id# [43:1:378:0:0:95203:1] totalSize# 12011624126 blobs# 36803 hash# 5782349934861976657 Put id# [97:1:347:0:0:653590:1] totalSize# 12012277716 blobs# 36804 hash# 7870940206954097462 Put id# [94:1:360:0:0:503633:1] totalSize# 12012781349 blobs# 36805 hash# 1969524150408872464 Put id# [33:1:408:0:0:65552:1] totalSize# 12012846901 blobs# 36806 hash# 8110999992889066885 Put id# [70:1:329:0:0:305764:1] totalSize# 12013152665 blobs# 36807 hash# 15544933372833059099 Put id# [39:1:362:0:0:191462:1] totalSize# 12013344127 blobs# 36808 hash# 2614425966056063457 Trim Put id# [46:1:374:0:0:570076:1] totalSize# 12013914203 blobs# 36809 hash# 7741922254027020915 Put id# [44:1:365:0:0:633187:1] totalSize# 12014547390 blobs# 36810 hash# 2533730788117081595 Put id# [25:1:336:0:0:465314:1] totalSize# 12015012704 blobs# 36811 hash# 7675361390684355322 Put id# [93:1:371:0:0:337848:1] totalSize# 12015350552 blobs# 36812 hash# 14638226838951681307 Put id# [35:1:359:0:0:631258:1] totalSize# 12015981810 blobs# 36813 hash# 11077843564863328365 Trim Put id# [16:1:393:0:0:81635:1] totalSize# 12016063445 blobs# 36814 hash# 4851244322569753595 Put id# [43:1:379:0:0:138362:1] totalSize# 12016201807 blobs# 36815 hash# 14865024159081628588 Put id# [76:1:362:0:0:172606:1] totalSize# 12016374413 blobs# 36816 hash# 3087968256920836243 Put id# [40:1:398:0:0:654178:1] totalSize# 12017028591 blobs# 36817 hash# 3197240687114212265 Put id# [89:1:362:0:0:490879:1] totalSize# 12017519470 blobs# 36818 hash# 17659698682122995755 Put id# [9:1:364:0:0:594600:1] totalSize# 12018114070 blobs# 36819 hash# 18364601442154406985 Put id# [30:1:369:0:0:163604:1] totalSize# 12018277674 blobs# 36820 hash# 
17532294483561836056 Put id# [37:1:337:0:0:322714:1] totalSize# 12018600388 blobs# 36821 hash# 1766579792618025676 Trim Put id# [40:1:399:0:0:406854:1] totalSize# 12019007242 blobs# 36822 hash# 5408469368050904929 Put id# [77:1:362:0:0:451060:1] totalSize# 12019458302 blobs# 36823 hash# 6783117314622319715 Put id# [4:1:404:0:0:86512:1] totalSize# 12019544814 blobs# 36824 hash# 10853739816909898478 Put id# [27:1:355:0:0:198134:1] totalSize# 12019742948 blobs# 36825 hash# 4267333662316550857 Put id# [68:1:378:0:0:410589:1] totalSize# 12020153537 blobs# 36826 hash# 10020386657947825794 Put id# [7:1:398:0:0:365393:1] totalSize# 12020518930 blobs# 36827 hash# 2635881009077876356 Put id# [24:1:364:0:0:240440:1] totalSize# 12020759370 blobs# 36828 hash# 875029714013926105 Trim Put id# [16:1:394:0:0:610445:1] totalSize# 12021369815 blobs# 36829 hash# 16815998613305308341 Put id# [56:1:361:0:0:62742:1] totalSize# 12021432557 blobs# 36830 hash# 6288205724575385290 Put id# [59:1:391:0:0:542841:1] totalSize# 12021975398 blobs# 36831 hash# 340827061577329813 Put id# [39:1:363:0:0:620122:1] totalSize# 12022595520 blobs# 36832 hash# 6625634963897894441 Put id# [49:1:397:0:0:430142:1] totalSize# 12023025662 blobs# 36833 hash# 4031835735790510269 Put id# [8:1:388:0:0:626413:1] totalSize# 12023652075 blobs# 36834 hash# 18003178463385498566 Change MinHugeBlobSize# 393216 Put id# [7:1:399:0:0:426747:1] totalSize# 12024078822 blobs# 36835 hash# 3351360013973664330 Put id# [14:1:371:0:0:486230:1] totalSize# 12024565052 blobs# 36836 hash# 9052650130984651582 Put id# [70:1:330:0:0:152990:1] totalSize# 12024718042 blobs# 36837 hash# 8910111819249098959 Put id# [72:1:376:0:0:258490:1] totalSize# 12024976532 blobs# 36838 hash# 17785899437948529469 Put id# [18:1:374:0:0:540817:1] totalSize# 12025517349 blobs# 36839 hash# 13172621299570463392 Put id# [60:1:397:0:0:118356:1] totalSize# 12025635705 blobs# 36840 hash# 9903083913199605531 Put id# [30:1:370:0:0:199882:1] totalSize# 12025835587 blobs# 36841 hash# 15603434257550977846 Put id# [92:1:401:0:0:257008:1] totalSize# 12026092595 blobs# 36842 hash# 15300156208041986288 Put id# [36:1:339:0:0:30235:1] totalSize# 12026122830 blobs# 36843 hash# 11638961937202844344 Put id# [23:1:384:0:0:48102:1] totalSize# 12026170932 blobs# 36844 hash# 14238669370949023580 Put id# [5:1:355:0:0:490399:1] totalSize# 12026661331 blobs# 36845 hash# 4264094392110962684 Put id# [39:1:364:0:0:363930:1] totalSize# 12027025261 blobs# 36846 hash# 10379868685549243280 Trim Put id# [87:1:378:0:0:162795:1] totalSize# 12027188056 blobs# 36847 hash# 4139413311516023562 Put id# [66:1:352:0:0:50176:1] totalSize# 12027238232 blobs# 36848 hash# 884990141411072165 Put id# [7:1:400:0:0:88285:1] totalSize# 12027326517 blobs# 36849 hash# 16768419514198710379 Put id# [25:1:337:0:0:219888:1] totalSize# 12027546405 blobs# 36850 hash# 12141022153868313534 Put id# [75:1:371:0:0:509462:1] totalSize# 12028055867 blobs# 36851 hash# 6962420910735172554 Put id# [40:1:400:0:0:635585:1] totalSize# 12028691452 blobs# 36852 hash# 10039444917652739330 Put id# [77:1:363:0:0:77980:1] totalSize# 12028769432 blobs# 36853 hash# 8559748917275649370 Put id# [11:1:379:0:0:485932:1] totalSize# 12029255364 blobs# 36854 hash# 10025685328214208624 Put id# [10:1:348:0:0:20720:1] totalSize# 12029276084 blobs# 36855 hash# 14775073795882587640 Put id# [72:1:377:0:0:463571:1] totalSize# 12029739655 blobs# 36856 hash# 15427174617898735647 Put id# [78:1:365:0:0:544755:1] totalSize# 12030284410 blobs# 36857 hash# 5950977726881853056 Put id# 
[50:1:373:0:0:25755:1] totalSize# 12030310165 blobs# 36858 hash# 1673322245567994719 Put id# [19:1:357:0:0:468041:1] totalSize# 12030778206 blobs# 36859 hash# 12889334445369970779 Put id# [21:1:377:0:0:332709:1] totalSize# 12031110915 blobs# 36860 hash# 6740913150729856072 Put id# [12:1:374:0:0:184342:1] totalSize# 12031295257 blobs# 36861 hash# 6730244253277721214 Put id# [65:1:366:0:0:165325:1] totalSize# 12031460582 blobs# 36862 hash# 14920463443182327817 Compact Put id# [5:1:356:0:0:443763:1] totalSize# 12031904345 blobs# 36863 hash# 9966104873199300633 Put id# [12:1:375:0:0:409435:1] totalSize# 12032313780 blobs# 36864 hash# 1769333347807761647 Trim Compact Put id# [2:1:385:0:0:522789:1] totalSize# 12032836569 blobs# 36865 hash# 2214427339839371149 Put id# [20:1:364:0:0:393377:1] totalSize# 12033229946 blobs# 36866 hash# 7995518691914955118 Put id# [40:1:401:0:0:136750:1] totalSize# 12033366696 blobs# 36867 hash# 14387488960673509053 Put id# [34:1:393:0:0:624974:1] totalSize# 12033991670 blobs# 36868 hash# 2928151512970445999 Put id# [27:1:356:0:0:292308:1] totalSize# 12034283978 blobs# 36869 hash# 7080219653487651339 Trim Put id# [42:1:350:0:0:409262:1] totalSize# 12034693240 blobs# 36870 hash# 9818687848357432716 Trim Put id# [40:1:402:0:0:640819:1] totalSize# 12035334059 blobs# 36871 hash# 14670388640417097923 Validate before exit >> TExportToS3Tests::ChecksumsWithCompression |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> MonPage::OptionsNoContent [GOOD] Test command err: 2026-02-19T08:11:14.104933Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485922095361606:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:14.104972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:14.449379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:14.467635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:14.467731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:14.552157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:14.560706Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:14.569742Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485922095361371:2081] 1771488674072287 != 1771488674072290 2026-02-19T08:11:14.640938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:14.753900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:14.753935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:14.753945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:14.754049Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:15.046276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:15.053666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:15.104125Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:15.119575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:15.138059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:15.142738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:11:20.906293Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485948994488136:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:20.906364Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:20.945967Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:21.069926Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:21.235339Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:21.241900Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485948994488103:2081] 1771488680902637 != 1771488680902640 
2026-02-19T08:11:21.279101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:21.279202Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:21.327829Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:21.394011Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:21.638598Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:21.638631Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:21.638641Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:21.638730Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:21.928304Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:22.359389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:22.466875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:22.472948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::NoUseAuthOk [GOOD] Test command err: 2026-02-19T08:11:13.238315Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485918987420709:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:13.238359Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:13.789854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:13.832093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-02-19T08:11:13.832213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:13.970471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:14.019588Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:14.021359Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485918987420678:2081] 1771488673234416 != 1771488673234419 2026-02-19T08:11:14.038071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:14.139055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:14.139084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:14.139095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:14.139199Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:14.257592Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:14.500538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:14.509192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:11:14.586973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:14.601629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:14.609211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-02-19T08:11:20.438410Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485948936533930:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:20.438469Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:20.531373Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:20.744530Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:20.752163Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:20.757909Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485948936533894:2081] 1771488680436927 != 1771488680436930 2026-02-19T08:11:20.776127Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:20.776220Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:20.857518Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:21.147916Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:21.170513Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:21.170537Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:21.170549Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:21.170643Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:21.469910Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:21.569675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:21.586757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:21.639542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:21.651031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:11:21.667937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_truncate_table_reboots/ydb-core-tx-schemeshard-ut_truncate_table_reboots |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_truncate_table_reboots/ydb-core-tx-schemeshard-ut_truncate_table_reboots |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_truncate_table_reboots/ydb-core-tx-schemeshard-ut_truncate_table_reboots >> TExportToS3Tests::ReplicationExportWithStaticCredentials >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser |90.0%| [TA] $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous >> LdapAuthProviderTests::CanFetchGroupsWithDelayUpdateSecurityState [GOOD] |90.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] |90.0%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin |90.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |90.0%| [TA] {RESULT} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system >> TExportToS3Tests::ChecksumsWithCompression [GOOD] |90.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTests::CanFetchGroupsWithDelayUpdateSecurityState [GOOD] Test command err: 2026-02-19T08:11:15.517692Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485927637800648:2267];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:15.519479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:15.794508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:15.794610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:15.823825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:15.847348Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:15.850177Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485927637800408:2081] 1771488675486270 != 1771488675486273 2026-02-19T08:11:15.863928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:15.994537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:15.994575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:15.994583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:15.994682Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:16.017830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:16.173866Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:16.174989Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:16.175010Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:16.176111Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:30273, port: 30273 
2026-02-19T08:11:16.176173Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-02-19T08:11:16.194477Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:16.246056Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:16.290227Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:16.290736Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:16.290827Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:16.334422Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:16.382119Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:16.384886Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****UgXw (19425B4C) () has now valid token of ldapuser@ldap 2026-02-19T08:11:16.513917Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:19.317906Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:19.318128Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:19.336251Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:19.338185Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485941793550615:2081] 1771488679216228 != 1771488679216231 2026-02-19T08:11:19.364377Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:19.364450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:19.377500Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:19.451871Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:19.451900Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-02-19T08:11:19.451906Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:19.451989Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:19.579643Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:19.587660Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:19.590805Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:19.590839Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:19.591473Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:22689, port: 22689 2026-02-19T08:11:19.591549Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:19.592955Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:19.641433Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:19.642659Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:19.642718Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:19.690088Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:19.738172Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:19.738972Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****Z5ow (4697D23C) () has now valid token of ldapuser@ldap 2026-02-19T08:11:23.609744Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:23.610048Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:23.613866Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:23.614689Z node 3 :CONFIGS_DISPATCHER 
ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608485960572567618:2081] 1771488683414970 != 1771488683414973 2026-02-19T08:11:23.622609Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:23.622685Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:23.653665Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:23.776094Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:23.776124Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:23.776130Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:23.776202Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:23.863724Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:23.976655Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:23.981016Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:23.981050Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:23.981666Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://qqq:19059 ldap://localhost:19059 ldap://localhost:11111, port: 19059 2026-02-19T08:11:23.981745Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:23.990180Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:24.034002Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:24.034922Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:24.034965Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:24.082242Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:24.134018Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:24.134829Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****OpdQ (ADD66C2C) () has now valid token of ldapuser@ldap 2026-02-19T08:11:28.064312Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:28.064447Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:28.181975Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:28.199694Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:28.199806Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:28.204717Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608485979875887867:2081] 1771488687932451 != 1771488687932454 2026-02-19T08:11:28.242878Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:28.442164Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:28.462416Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:28.462441Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:28.462448Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:28.462544Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:28.661845Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:28.661887Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:28.661952Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:875: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****IgLQ (010C2305)) 2026-02-19T08:11:29.001797Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:29.670537Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:29.670563Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1558: Handle deferred tokens for database: /Root 2026-02-19T08:11:29.670869Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:29.670889Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:29.671656Z node 4 :LDAP_AUTH_PROVIDER DEBUG: 
ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:25407, port: 25407 2026-02-19T08:11:29.671717Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:29.672907Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:29.715424Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:29.716353Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:29.716400Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:29.758272Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:29.802458Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:29.803603Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****IgLQ (010C2305) () has now valid token of ldapuser@ldap >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> TExportToS3Tests::ReplicationExportWithStaticCredentials [GOOD] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> TExportToS3Tests::Changefeeds >> LdapAuthProviderTests::CanNotFetchGroupsOverSaslExternalWithoutClientCert [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> TExportToS3Tests::AuditCompletedExport [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> TExportToS3Tests::AuditCancelledExport >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous >> 
KqpFulltextIndexes::DeleteRowReturning [GOOD] >> KqpFulltextIndexes::DeleteRowWithRelevance+Covered >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> KqpFulltextIndexes::SelectWithFulltextMatch+UTF8 [GOOD] >> KqpFulltextIndexes::SelectWithFulltextMatch-UTF8 >> KqpFulltextIndexes::SelectWithFulltextRelevanceB1FactorAndK1Factor [GOOD] >> KqpFulltextIndexes::SelectWithRelevanceContainsAndNgramWildcard+Edge-Covered |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |90.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin >> LdapAuthProviderTests::CanNotRequestWithEmptyBindPassword [GOOD] |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |90.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTests::CanNotFetchGroupsOverSaslExternalWithoutClientCert [GOOD] Test command err: 2026-02-19T08:11:15.884788Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485926647117735:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:15.884847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:16.199598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:16.247434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:16.247569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:16.302286Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:16.304929Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485926647117704:2081] 1771488675883110 != 1771488675883113 2026-02-19T08:11:16.321295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:16.489394Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:16.505269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:16.505291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from 
file: (empty maybe) 2026-02-19T08:11:16.505298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:16.505383Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:16.761086Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:16.765354Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:16.765390Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:16.766262Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:8445, port: 8445 2026-02-19T08:11:16.767081Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:16.768003Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:208: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://127.0.0.1:8445. Invalid credentials 2026-02-19T08:11:16.768707Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket eyJh****w_ng (77D01BF1) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://127.0.0.1:8445. Invalid credentials)' 2026-02-19T08:11:16.892873Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:19.719285Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608485942511010381:2067];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:19.721532Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:19.764697Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:19.838625Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:19.840422Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485942511010354:2081] 1771488679716752 != 1771488679716755 2026-02-19T08:11:19.858169Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:19.858268Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:19.886843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:19.961293Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:19.961314Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:19.961320Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-02-19T08:11:19.961395Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:19.982201Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:20.040420Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:20.044094Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:20.044128Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:20.044867Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:22289, port: 22289 2026-02-19T08:11:20.044968Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:20.045746Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:208: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://127.0.0.1:22289. Invalid credentials 2026-02-19T08:11:20.045941Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket eyJh****ft_w (F4E85439) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://127.0.0.1:22289. Invalid credentials)' 2026-02-19T08:11:24.546449Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:24.546607Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:24.649172Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:24.662243Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:24.662323Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:24.708479Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:24.894099Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:24.954404Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:24.954427Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:24.954434Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:24.954521Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:25.049852Z node 3 :TICKET_PARSER 
DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:25.052343Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:25.052369Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:25.053003Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:8491, port: 8491 2026-02-19T08:11:25.053070Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:25.054096Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-02-19T08:11:25.054179Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:353: Could not perform search for filter &(uid=ldapuser)() on server ldap://127.0.0.1:8491. Bad search filter 2026-02-19T08:11:25.054344Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket eyJh****KNWA (A42DF285) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://127.0.0.1:8491. Bad search filter)' 2026-02-19T08:11:29.691304Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608485986192170981:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:29.691364Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:29.717024Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:29.825090Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:30.117808Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:30.135286Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:30.136744Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608485986192170728:2081] 1771488689635032 != 1771488689635035 2026-02-19T08:11:30.182987Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:30.183064Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:30.348457Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:30.551205Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:30.551256Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2026-02-19T08:11:30.551265Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:30.551351Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:30.684401Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:30.720926Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:31.306133Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:31.306580Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:31.306605Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:31.307364Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldaps, uris: ldaps://127.0.0.1:19651, port: 19651 2026-02-19T08:11:31.307454Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:196: bind: Sasl EXTERNAL 135972090709568:error:1417C0C7:SSL routines:tls_process_client_certificate:peer did not return a certificate:contrib/libs/openssl/ssl/statem/statem_srvr.c:3727: 2026-02-19T08:11:31.321186Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:208: Could not perform initial LDAP bind for dn on server ldaps://127.0.0.1:19651. Can't contact LDAP server 2026-02-19T08:11:31.321687Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****lHeA (1359FBA5) () has now retryable error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn on server ldaps://127.0.0.1:19651. Can't contact LDAP server)' 2026-02-19T08:11:31.321997Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:31.322022Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:31.322853Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldaps, uris: ldaps://127.0.0.1:19651, port: 19651 2026-02-19T08:11:31.322914Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:196: bind: Sasl EXTERNAL 2026-02-19T08:11:31.323871Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:208: Could not perform initial LDAP bind for dn on server ldaps://127.0.0.1:19651. Can't contact LDAP server 2026-02-19T08:11:31.323954Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****lHeA (1359FBA5) () has now retryable error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn on server ldaps://127.0.0.1:19651. 
Can't contact LDAP server)' >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ReplicationExportWithStaticCredentials [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:43.699332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:43.699438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:43.699506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:43.699548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:43.699579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:43.699617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:43.699643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:43.699682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:43.699756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:43.699833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:43.700536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:43.700861Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:43.781421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:43.781485Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:43.795984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:43.796596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:43.796800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:43.807908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:43.808651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:43.809356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:43.809683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:43.813107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:43.813277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:43.814430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:43.814488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:43.814589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:43.814629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:43.814681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:43.814860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:43.962192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 
281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.963954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:43.964020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
26-02-19T08:11:34.775991Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-02-19T08:11:34.776055Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:34.776169Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-02-19T08:11:34.778897Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2026-02-19T08:11:34.778991Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-02-19T08:11:34.779046Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-02-19T08:11:34.781698Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-02-19T08:11:34.781891Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000040 2026-02-19T08:11:34.782487Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:34.782898Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:34.783079Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740528 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:11:34.783154Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000040, at schemeshard: 72057594046678944 2026-02-19T08:11:34.783319Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:181: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-02-19T08:11:34.783433Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-02-19T08:11:34.783492Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:34.783558Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 
2026-02-19T08:11:34.783774Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:34.783861Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:34.783953Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:11:34.784013Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-02-19T08:11:34.784084Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-02-19T08:11:34.784152Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-02-19T08:11:34.784204Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710758:0 2026-02-19T08:11:34.784294Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:34.784352Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-02-19T08:11:34.784418Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-02-19T08:11:34.784481Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:11:34.785043Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:34.787158Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:34.787211Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:34.787405Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:11:34.787605Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:34.787646Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-02-19T08:11:34.787694Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 
281474976710758, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-02-19T08:11:34.788660Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:34.788762Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:34.788813Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-02-19T08:11:34.788881Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:11:34.788952Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:11:34.791960Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:34.792088Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:34.792128Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-02-19T08:11:34.792169Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:11:34.792215Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:11:34.792349Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-02-19T08:11:34.792426Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [8:128:2153] 2026-02-19T08:11:34.793105Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:11:34.793179Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:11:34.793293Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:34.796906Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:34.797502Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-02-19T08:11:34.797640Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-02-19T08:11:34.797778Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710758 2026-02-19T08:11:34.798595Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:11:34.800257Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:34.800404Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:824:2794] TestWaitNotification: OK eventTxId 102 >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTests::CanNotRequestWithEmptyBindPassword [GOOD] Test command err: 2026-02-19T08:11:01.940091Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485868020829003:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:01.940645Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:02.150564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:02.169516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:02.169679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:02.208164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:02.239070Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485868020828972:2081] 1771488661938476 != 1771488661938479 2026-02-19T08:11:02.240089Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:02.327141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-02-19T08:11:02.327170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:02.327196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:02.327359Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:02.369294Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:02.469874Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:02.471552Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:02.471574Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:02.472305Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:11335, port: 11335 2026-02-19T08:11:02.472825Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:02.482156Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:02.528914Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:02.529365Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:02.529412Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:02.578032Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:02.625411Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:02.626571Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****O4lw (D204AF1C) () has now valid token of ldapuser@ldap 2026-02-19T08:11:02.947131Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:06.940224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485868020829003:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:06.940330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:07.948887Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****O4lw (D204AF1C) 2026-02-19T08:11:07.949095Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:11335, port: 11335 2026-02-19T08:11:07.949222Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:07.949898Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:07.998044Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:387: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-02-19T08:11:07.998463Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:412: Try to get nested groups - tree traversal 2026-02-19T08:11:07.998500Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:08.042005Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:08.086019Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:434: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-02-19T08:11:08.086822Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****O4lw (D204AF1C) () has now valid token of ldapuser@ldap 2026-02-19T08:11:13.327720Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:13.327865Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:13.437106Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:13.448506Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485915871673988:2081] 1771488673260668 != 1771488673260671 2026-02-19T08:11:13.506419Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:13.506509Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:13.584639Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:13.591769Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:13.878071Z node 2 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:13.878097Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:13.878103Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:13.878203Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:14.009074Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:14.012560Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:14.012597Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:14.013250Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:6070, port: 6070 2026-02-19T08:11:14.013311Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:14.014105Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:14.058216Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****73Xg (28CA5BCB) () has now valid token of ldapuser@ldap 2026-02-19T08:11:14.297515Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:18.281902Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****73Xg (28CA5BCB) 2026-02-19T08:11:18.282018Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:261: init: scheme: ldap, uris: ldap://127.0.0.1:6070, port: 6070 2026-02-19T08:11:18.282090Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:199: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-02-19T08:11:18.288060Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:338: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-02-19T08:11:18.334420Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****73Xg (28CA5BCB) () has now valid token of ldapuser@ldap 2026-02-19T08:11:21.366005Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****73Xg (28CA5BCB) 2026-02-19T08:11:25.335615Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:25.338862Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:25.478803Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:25.536705Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:25.536784Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:25.561372Z node 3 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:25.727177Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:25.776354Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:25.776377Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:25.776383Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:25.776462Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:25.932341Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:25.935872Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:25.935901Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:25.936616Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket eyJh****Q0kg (913651C5) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2026-02-19T08:11:30.767283Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608485991511788293:2178];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:30.767332Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:30.845321Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:31.086394Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:31.118442Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:31.118543Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:31.133977Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608485991511788153:2081] 1771488690748054 != 1771488690748057 2026-02-19T08:11:31.155174Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:31.166166Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:31.486584Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:31.486609Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:11:31.486615Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:31.486698Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:31.553879Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:11:31.565457Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:11:31.565495Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:11:31.566285Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket eyJh****U0_Q (5751CE2F) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2026-02-19T08:11:31.584123Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:31.778074Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TExportToS3Tests::AuditCancelledExport [GOOD] >> TExportToS3Tests::Changefeeds [GOOD] >> TExportToS3Tests::AutoDropping >> YdbProxy::ReadNonExistentTopic [GOOD] >> TExportToS3Tests::CorruptedDecimalValue >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system >> KqpService::CloseSessionDuringNonFinalCleanup [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2026-02-19T08:11:22.733582Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485955514289518:2232];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:22.742708Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:23.213636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:23.213764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:23.266791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:23.309885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485955514289319:2081] 1771488682679343 != 1771488682679346 
2026-02-19T08:11:23.320698Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:23.320818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:23.517825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:23.714005Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:23.798335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:23.798353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:23.798360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:23.798443Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:24.189453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:24.198065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:24.444096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:26.680791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485972694159647:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:26.680819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485972694159634:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:26.680879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485972694159648:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:26.680943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:26.682967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485972694159656:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:26.683073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:26.685135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:26.752767Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608485972694159658:2636] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:11:26.760304Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608485972694159654:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-02-19T08:11:26.760353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608485972694159655:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-02-19T08:11:26.825261Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608485972694159705:2667] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:26.852563Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608485972694159723:2675] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:27.718124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485955514289518:2232];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:27.718204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:27.778765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:28.256606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.990470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:11:30.484431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:11:31.284694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:11:33.978460Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486005881809275:2067];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:33.978507Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:34.030100Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:34.146256Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:34.150777Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486005881809248:2081] 1771488693965585 != 1771488693965588 2026-02-19T08:11:34.164305Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:34.172996Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:34.173081Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:34.203694Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:34.431611Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:34.473844Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:34.473869Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:34.473876Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:34.473950Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:34.771156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:34.791482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:34.871094Z node 2 :PQ_METACACHE ERROR: msgbus_server_pq_metacache.cpp:384: Handle SchemeCache response: result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 2 ResultSet [{ Path: Root/topic TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:11:34.962602Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TExportToS3Tests::CorruptedDecimalValue [GOOD] >> TExportToS3Tests::AutoDropping [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> 
EraseRowsTests::ConditionalEraseRowsShouldErase >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous >> YdbProxy::AlterTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionDuringNonFinalCleanup [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:28.045536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:28.176847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:28.213168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:28.213245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:28.213799Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:28.665530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:28.665676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:28.932424Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488684417407 != 1771488684417411 2026-02-19T08:11:28.951747Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:29.057933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:29.220700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:29.932347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:29.932395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:29.932423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:29.932709Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:29.946743Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:30.764818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:30.831275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:31.104836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:31.544594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:31.962448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:33.315295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1922:3526], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:33.315750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:33.316657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1996:3545], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:33.316777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:33.349962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:33.516162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:33.805549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:34.081567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:34.389956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:34.671958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:35.031199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:35.417951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:35.847123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2806:4184], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:35.847288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:35.847769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2810:4188], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:35.847868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:35.847947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2813:4191], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:35.854910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:36.049215Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2815:4193], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:11:36.131209Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2877:4236] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:38.162936Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:304} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE= temp_dir_name# 844340f5-49ba-2219-13c7-a4aeee813264 trace_id# 2026-02-19T08:11:38.163693Z node 1 :KQP_ ... 6715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:3199:4469], trace_id# 2026-02-19T08:11:38.458402Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:3193:4469] TxId: 281474976715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:11:38.460335Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [1:3193:4469] TxId: 281474976715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# CANCELLED Issues# trace_id# 2026-02-19T08:11:38.460461Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:3193:4469] TxId: 281474976715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# trace_id# 2026-02-19T08:11:38.460574Z node 1 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1214} ActorId: [1:3193:4469] TxId: 281474976715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [1:3199:4469] TaskId# 1 trace_id# 2026-02-19T08:11:38.460677Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2832} ActorId: [1:3193:4469] TxId: 281474976715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Waiting for shutdown pending_tasks# 0 pending_compute_actors# 1 trace_id# 2026-02-19T08:11:38.460906Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [1:3199:4469], TxId: 281474976715673, task: 1. Ctx: { CheckpointId : . TraceId : 01khtf8qxj6fpd5mjas9d81h8d. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:3193:4469], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-02-19T08:11:38.469131Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:3193:4469] TxId: 281474976715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# WaitShutdownState ComputeActor# [1:3199:4469] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 1164 Tasks { TaskId: 1 CpuTimeUs: 511 Tables { TablePath: "/Root/EightShard" AffectedPartitions: 1 } ComputeCpuTimeUs: 29 BuildCpuTimeUs: 482 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771488698458 CurrentWaitInputTimeUs: 1517 UpdateTimeMs: 1771488698461 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:11:38.469324Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:3199:4469] 2026-02-19T08:11:38.469496Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:3193:4469] TxId: 281474976715673. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:11:38.469816Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2467} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: ExecuteState, LegacyTraceId: 01khtf8qxj6fpd5mjas9d81h8d, TEvTxResponse current_tx# 1 transactions_size# 1 status# CANCELLED trace_id# 2026-02-19T08:11:38.469908Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2498} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: ExecuteState, LegacyTraceId: 01khtf8qxj6fpd5mjas9d81h8d, TEvTxResponse has non-success status current_tx# 1 execution_type# Data status# CANCELLED trace_id# 2026-02-19T08:11:38.469980Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: ExecuteState, LegacyTraceId: 01khtf8qxj6fpd5mjas9d81h8d, Create QueryResponse for error on request, msg: status# CANCELLED issues# trace_id# 2026-02-19T08:11:38.470134Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2795} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: ExecuteState, LegacyTraceId: 01khtf8qxj6fpd5mjas9d81h8d, TxInfo status# Aborted kind# ReadOnly total_duration# 0 server_duration# 16.077 queries_count# 2 trace_id# 2026-02-19T08:11:38.470306Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1991} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: ExecuteState, LegacyTraceId: 01khtf8qxj6fpd5mjas9d81h8d, Sending to Executer span_id_size# 8 trace_id# 2026-02-19T08:11:38.470509Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2062} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: ExecuteState, LegacyTraceId: 
01khtf8qxj6fpd5mjas9d81h8d, Created new KQP executer executer_id# [1:3202:4469] is_rollback# true trace_id# 2026-02-19T08:11:38.470613Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3378} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: ExecuteState, LegacyTraceId: 01khtf8qxj6fpd5mjas9d81h8d, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 1 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-02-19T08:11:38.471360Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715674. Resolved key sets: 0 2026-02-19T08:11:38.471484Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715674. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:11:38.471584Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:3202:4469] TxId: 281474976715674. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:11:38.471712Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:3202:4469] TxId: 281474976715674. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:11:38.471779Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:3202:4469] TxId: 281474976715674. Ctx: { TraceId: 01khtf8qxj6fpd5mjas9d81h8d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:11:38.472012Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3455} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: CleanupState, LegacyTraceId: 01khtf8qxj6fpd5mjas9d81h8d, EndCleanup is_final# false trace_id# 2026-02-19T08:11:38.472116Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3170} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: CleanupState, LegacyTraceId: 01khtf8qxj6fpd5mjas9d81h8d, Sent query response back to proxy proxy_request_id# 7 proxy_id# [1:67:2114] trace_id# 2026-02-19T08:11:38.472607Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1991} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: CleanupState, Sending to Executer span_id_size# 8 trace_id# 2026-02-19T08:11:38.472835Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2062} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: CleanupState, Created new KQP executer executer_id# [1:3206:4469] is_rollback# true trace_id# 2026-02-19T08:11:38.472932Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3378} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: CleanupState, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 1 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-02-19T08:11:38.474466Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [1:3206:4469] TxId: 281474976715675. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [1:3205:4469] trace_id# 2026-02-19T08:11:38.474775Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:3206:4469] TxId: 281474976715675. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-02-19T08:11:38.475049Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3455} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: CleanupState, EndCleanup is_final# true trace_id# 2026-02-19T08:11:38.475115Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3469} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-02-19T08:11:38.475346Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3564} SessionId: ydb://session/3?node_id=1&id=YTlhMWVjZGQtMWNiYjUxMjYtZDYxNDkxODMtMmRlMmRiNzE=, ActorId: [1:3161:4469], ActorState: unknown state, Session actor destroyed trace_id# >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:30.863371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:31.048917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:31.078964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:31.079034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:31.079543Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:31.380353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:31.380494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:31.464688Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488687075382 != 1771488687075386 2026-02-19T08:11:31.473104Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:31.522963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:31.627987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:32.094251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:32.126440Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:32.250471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:32.375480Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:32.375778Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:32.486874Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:32.494013Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:32.495897Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:32.495998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:32.496073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:32.496487Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:32.496666Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:32.496777Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:32.507796Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:32.617325Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:32.617545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:32.617703Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:32.622044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:32.622095Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:32.622136Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:32.622691Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:32.622806Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:32.622897Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:32.622951Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:32.622991Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:32.623030Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:32.623097Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:32.623283Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:32.623557Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:32.623669Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:32.638170Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:32.650395Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:32.650524Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:32.785611Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:32.791614Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:32.791738Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:32.792095Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:32.792147Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:32.792211Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:32.792568Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:32.792774Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:32.793902Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:32.794045Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:32.805886Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:32.806553Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:32.809539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:32.809636Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:32.810158Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:32.810270Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:32.812069Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:32.812125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:32.812174Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:32.812264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:32.812334Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:32.812450Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:32.817911Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:32.820243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:32.820334Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:32.821310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... hard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:38.866468Z node 2 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:38.866585Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:38.867090Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:38.867208Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:38.867304Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:38.867354Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:38.867407Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:38.867467Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:38.867973Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:889:2768], serverId# [2:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:38.868137Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:38.868413Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:38.868588Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:38.870511Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:38.882123Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:38.882254Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:39.001925Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:923:2788], serverId# [2:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:39.003404Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:39.003470Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.004058Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 
2026-02-19T08:11:39.004106Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:39.004147Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:39.004381Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:39.004508Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:39.004912Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:39.004978Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:39.005381Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:39.006022Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:39.007634Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:39.007683Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.008017Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:39.008079Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:39.009354Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:39.009411Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:39.009456Z node 2 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:39.009514Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:39.009561Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:39.009646Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.010431Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:39.012136Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:39.012200Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 
2026-02-19T08:11:39.012963Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:39.018072Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:959:2816], serverId# [2:960:2817], sessionId# [0:0:0] 2026-02-19T08:11:39.018225Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:39.029605Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:39.029688Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.030004Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:959:2816], serverId# [2:960:2817], sessionId# [0:0:0] 2026-02-19T08:11:39.031816Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:965:2822], serverId# [2:966:2823], sessionId# [0:0:0] 2026-02-19T08:11:39.031957Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:39.032118Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:39.032166Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.032382Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:965:2822], serverId# [2:966:2823], sessionId# [0:0:0] 2026-02-19T08:11:39.034568Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:970:2827], serverId# [2:971:2828], sessionId# [0:0:0] 2026-02-19T08:11:39.034711Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:39.034879Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:39.034920Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.035114Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:970:2827], serverId# [2:971:2828], sessionId# [0:0:0] 2026-02-19T08:11:39.036839Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:975:2832], serverId# [2:976:2833], sessionId# [0:0:0] 2026-02-19T08:11:39.036987Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:39.037140Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:39.037182Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.037371Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:975:2832], serverId# [2:976:2833], sessionId# [0:0:0] 2026-02-19T08:11:39.039354Z node 2 
:TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:980:2837], serverId# [2:981:2838], sessionId# [0:0:0] 2026-02-19T08:11:39.039501Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:39.039744Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:39.039789Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.039974Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:980:2837], serverId# [2:981:2838], sessionId# [0:0:0] 2026-02-19T08:11:39.041524Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:985:2842], serverId# [2:986:2843], sessionId# [0:0:0] 2026-02-19T08:11:39.041676Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:39.042184Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:39.042230Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.042448Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:985:2842], serverId# [2:986:2843], sessionId# [0:0:0] >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2026-02-19T08:11:23.939411Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485959711585754:2242];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:23.939472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:24.423775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:24.494681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:24.494774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:24.646336Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:24.649977Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7608485959711585549:2081] 1771488683892987 != 1771488683892990 2026-02-19T08:11:24.650036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:24.678583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:24.949909Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:25.034032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:25.330718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:25.330751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:25.330763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:25.330854Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:25.791998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:25.800001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:28.921429Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608485981186422927:2503] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-02-19T08:11:28.941117Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485959711585754:2242];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:28.941245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:28.953672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:31.157722Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:31.157819Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2026-02-19T08:11:31.281183Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:31.285614Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608485995348163935:2081] 1771488691028650 != 1771488691028653 2026-02-19T08:11:31.322294Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:31.322379Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:31.329782Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:31.351150Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:31.782909Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:31.874538Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:31.874575Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:31.874609Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:31.874702Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:32.111954Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:32.243958Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:35.164348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:35.272294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:36.688492Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608486018688332386:2067];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:36.690217Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-02-19T08:11:36.796414Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:36.876376Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:36.877783Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608486018688332359:2081] 1771488696687025 != 1771488696687028 2026-02-19T08:11:36.918707Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:36.918816Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:36.922819Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:36.993046Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:37.277953Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:37.277982Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:37.277989Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:37.278073Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:37.589814Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:37.711527Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:37.790822Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:11:37.834478Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486022983300643:2581] txid# 281474976715660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDecimalValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:51.507773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:51.507864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:51.507926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:51.507959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:51.507994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:51.508035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:51.508059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:51.508096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:51.508163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:51.508257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:51.509038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:51.509318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:51.591900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:51.591945Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:51.603250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:51.604079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:51.604382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:51.616260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:51.616950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-02-19T08:10:51.617672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:51.618031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:51.621662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:51.621876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:51.623148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:51.623217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:51.623339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:51.623416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:51.623486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:51.623688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:51.763050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:51.764785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 9:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2026-02-19T08:11:40.104002Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000041 2026-02-19T08:11:40.105182Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:40.105326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 30064773232 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:11:40.105383Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:489: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000041, at schemeshard: 72057594046678944 2026-02-19T08:11:40.105563Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710759:0 Propose -> ProposedWaitParts 2026-02-19T08:11:40.105772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 REQUEST: PUT /Backup1/metadata.json HTTP/1.1 HEADERS: Host: localhost:17423 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: B2872018-0669-4598-9478-5F4F6E85D203 amz-sdk-request: attempt=1 content-length: 119 content-md5: oX2vUQoa+8sc65ouCpRQqg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /Backup1/metadata.json / / 119 2026-02-19T08:11:40.146583Z 
node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:40.146651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:11:40.146883Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:40.146916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 40 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000041 2026-02-19T08:11:40.147514Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-02-19T08:11:40.147562Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:328: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:11:40.148097Z node 7 :DATASHARD_BACKUP ERROR: export_scan.cpp:208: [Export] [scanner] Error read data from table: Invalid Decimal binary representation 2026-02-19T08:11:40.163039Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2026-02-19T08:11:40.163169Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2026-02-19T08:11:40.163208Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2026-02-19T08:11:40.163249Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-02-19T08:11:40.163292Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-02-19T08:11:40.163389Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 2026-02-19T08:11:40.167608Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2026-02-19T08:11:40.190040Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 862 RawX2: 30064773900 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-02-19T08:11:40.190152Z 
node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2026-02-19T08:11:40.190322Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 862 RawX2: 30064773900 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-02-19T08:11:40.190459Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:303: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 862 RawX2: 30064773900 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-02-19T08:11:40.190577Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:40.190635Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-02-19T08:11:40.190699Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:11:40.190772Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710759:0 ProposedWaitParts -> Done 2026-02-19T08:11:40.191003Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:40.193405Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-02-19T08:11:40.193815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-02-19T08:11:40.193871Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2026-02-19T08:11:40.194034Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2026-02-19T08:11:40.194076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-02-19T08:11:40.194122Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2026-02-19T08:11:40.194158Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-02-19T08:11:40.194199Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2026-02-19T08:11:40.194278Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:127:2152] message: TxId: 281474976710759 2026-02-19T08:11:40.194339Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-02-19T08:11:40.194389Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710759:0 2026-02-19T08:11:40.194454Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710759:0 2026-02-19T08:11:40.194581Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:11:40.196896Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2026-02-19T08:11:40.196980Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710759 2026-02-19T08:11:40.197231Z node 7 :EXPORT NOTICE: schemeshard_export__create.cpp:767: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table1' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38] SourcePathType: EPathTypeTable State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid Decimal binary representation' } 2026-02-19T08:11:40.199301Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:40.199372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:891:2849] TestWaitNotification: OK eventTxId 102 >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:41.128444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:41.128542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:41.128607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:41.128649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:41.128687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:41.128727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:41.128783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:41.128826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:41.128899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:41.128966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:41.129883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:41.130219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:41.230757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:41.230822Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:41.244781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:41.245382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:41.245592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:41.257313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:41.258077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:41.258807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:41.259087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:41.262244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:41.262430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:41.263619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:41.263676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:41.263809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:41.263866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:41.263918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:41.264109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:41.413661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.414657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.414775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.414849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.414916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.415628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... e 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710761 2026-02-19T08:11:40.227796Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:411: TExport::TTxProgress: DoExecute 2026-02-19T08:11:40.227819Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:1377: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-02-19T08:11:40.227843Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:1408: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2026-02-19T08:11:40.229294Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:431: TExport::TTxProgress: DoComplete 2026-02-19T08:11:40.229363Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:40.229403Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:882:2840] TestWaitNotification: OK eventTxId 102 2026-02-19T08:11:40.230335Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:11:40.230516Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 217us result status StatusSuccess 2026-02-19T08:11:40.231008Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable 
CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 desc: 2 2026-02-19T08:11:40.231530Z node 8 :EXPORT DEBUG: schemeshard_export__forget.cpp:74: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-02-19T08:11:40.234117Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:411: TExport::TTxProgress: DoExecute 2026-02-19T08:11:40.234187Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:855: TExport::TTxProgress: Resume: id# 102 2026-02-19T08:11:40.234278Z node 8 :EXPORT INFO: schemeshard_export__create.cpp:636: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-02-19T08:11:40.234384Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:431: TExport::TTxProgress: DoComplete 2026-02-19T08:11:40.234544Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2026-02-19T08:11:40.234616Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:411: TExport::TTxProgress: DoExecute 2026-02-19T08:11:40.234673Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:978: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2026-02-19T08:11:40.234759Z node 8 :EXPORT INFO: schemeshard_export__create.cpp:628: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2026-02-19T08:11:40.234888Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:431: TExport::TTxProgress: 
DoComplete 2026-02-19T08:11:40.236898Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2026-02-19T08:11:40.237049Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2026-02-19T08:11:40.237246Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, at schemeshard: 72057594046678944 2026-02-19T08:11:40.239313Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2026-02-19T08:11:40.239591Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-102 2026-02-19T08:11:40.239753Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7304: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-02-19T08:11:40.239837Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7306: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2026-02-19T08:11:40.239939Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:411: TExport::TTxProgress: DoExecute 2026-02-19T08:11:40.239980Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:1039: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-02-19T08:11:40.240048Z node 8 :EXPORT TRACE: schemeshard_export__create.cpp:1040: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2026-02-19T08:11:40.240160Z node 8 :EXPORT INFO: schemeshard_export__create.cpp:1223: TExport::TTxProgress: Wait 
for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2026-02-19T08:11:40.241814Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:431: TExport::TTxProgress: DoComplete 2026-02-19T08:11:40.241964Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2026-02-19T08:11:40.242061Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-02-19T08:11:40.242121Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710761 2026-02-19T08:11:40.242167Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:411: TExport::TTxProgress: DoExecute 2026-02-19T08:11:40.242210Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:1377: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-02-19T08:11:40.242251Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:1408: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2026-02-19T08:11:40.244918Z node 8 :EXPORT DEBUG: schemeshard_export__create.cpp:431: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2026-02-19T08:11:40.245146Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:11:40.245200Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:11:40.245556Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:11:40.245678Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:40.245754Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:1106:3059] TestWaitNotification: OK eventTxId 102 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/prefixed_vector/ydb-core-kqp-ut-indexes-prefixed_vector |90.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/prefixed_vector/ydb-core-kqp-ut-indexes-prefixed_vector |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/prefixed_vector/ydb-core-kqp-ut-indexes-prefixed_vector >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [FAIL] >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::DocApi[PqRunner] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> KqpFulltextIndexes::SelectWithRelevanceContainsAndNgramWildcard+Edge-Covered [GOOD] >> KqpFulltextIndexes::SelectWithRelevanceContainsAndNgramWildcard+Edge+Covered >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin >> ReadOnlyVDisk::TestWrites >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> KqpFulltextIndexes::SelectWithFulltextMatch-UTF8 [GOOD] >> KqpFulltextIndexes::SelectWithFulltextMatchAndEdgeNgramWildcard >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> KqpFulltextIndexes::DeleteRowWithRelevance+Covered [GOOD] >> KqpFulltextIndexes::DeleteRowWithRelevance-Covered >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> test_restarts.py::test_basic [FAIL] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [FAIL] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> ReadOnlyVDisk::TestDiscover >> ReadOnlyVDisk::TestReads |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |90.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:10:40.897325Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:40.897414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:40.897512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:40.897555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:40.897592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:40.897642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:40.897677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:40.897720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:40.897783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:40.897868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:40.898755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:40.899073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:40.982722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:10:40.982786Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:40.997075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:41.002190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:41.002401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:41.026278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:41.029833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:41.030636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:41.031006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:41.035515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:41.035676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:41.036964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:41.037033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:41.037177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:41.037232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:41.037286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:41.037569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:41.191010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.191874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:10:41.192925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... move publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:11:45.642333Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.642486Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.642538Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2026-02-19T08:11:45.642591Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-02-19T08:11:45.642647Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:11:45.642784Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-02-19T08:11:45.644908Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2026-02-19T08:11:45.644970Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-02-19T08:11:45.645018Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2026-02-19T08:11:45.649575Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2026-02-19T08:11:45.649772Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 
2026-02-19T08:11:45.649994Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2026-02-19T08:11:45.650457Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:45.650597Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740528 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:11:45.650659Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2026-02-19T08:11:45.650834Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:181: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2026-02-19T08:11:45.650930Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-02-19T08:11:45.650983Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-02-19T08:11:45.651055Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-02-19T08:11:45.651110Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-02-19T08:11:45.651202Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:11:45.651287Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:45.651341Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2026-02-19T08:11:45.651403Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-02-19T08:11:45.651466Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-02-19T08:11:45.651530Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710761:0 2026-02-19T08:11:45.651632Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:11:45.651702Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-02-19T08:11:45.651760Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:11:45.651816Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:11:45.652590Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.659441Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:45.659575Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:45.659774Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:11:45.659929Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:45.659972Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-02-19T08:11:45.660017Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-02-19T08:11:45.661096Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.661209Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.661271Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-02-19T08:11:45.661350Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:11:45.661418Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:11:45.663794Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.663901Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.663942Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-02-19T08:11:45.663983Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:11:45.664026Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:11:45.664150Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-02-19T08:11:45.664234Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [8:128:2153] 2026-02-19T08:11:45.668288Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.675089Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-02-19T08:11:45.675265Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-02-19T08:11:45.675347Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710761 2026-02-19T08:11:45.683623Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:11:45.683750Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:1006:2957] TestWaitNotification: OK eventTxId 102 >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin >> ReadOnlyVDisk::TestSync >> ReadOnlyVDisk::TestStorageLoad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:34.560610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:34.708470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:34.731134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:34.731244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:34.731793Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:35.053502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:35.053662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:35.141223Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488691298200 != 1771488691298204 2026-02-19T08:11:35.155982Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:35.211567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:35.326012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:35.698584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:35.718417Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:35.832190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:35.887024Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:35.887318Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:35.967124Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:35.967413Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:35.969424Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:35.969540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:35.969655Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:35.970129Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:35.970281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:35.970389Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:35.982421Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:36.023071Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:36.023324Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:36.023507Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:36.023573Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:36.023618Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:36.023660Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:36.024519Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:36.024643Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:36.024737Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:36.024787Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:36.024830Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:36.024881Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:36.024963Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:36.025108Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:36.025430Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:36.025545Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:36.028005Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:36.041688Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:36.041865Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:36.180322Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:36.196837Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:36.197031Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:36.197484Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:36.197554Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:36.197662Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:36.198039Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:36.198258Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:36.199204Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:36.199317Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:36.201986Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:36.202562Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:36.205299Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:36.205365Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:36.206127Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:36.206238Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:36.208371Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:36.208427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:36.208535Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:36.208622Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:36.208685Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:36.208790Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:36.214635Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:36.216785Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:36.216870Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:36.217996Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:47.785170Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:47.785499Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:47.785696Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:47.788044Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:47.788123Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:47.788698Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:47.789204Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:47.799780Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:47.799886Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:47.801483Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:47.801591Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:47.803545Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:47.803612Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:47.803727Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:47.803816Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:47.803880Z node 3 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:47.803993Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:47.809782Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:47.811907Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2026-02-19T08:11:47.812007Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:47.812889Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:47.829292Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:47.829409Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:47.833960Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:47.834972Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:47.835239Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:47.840881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:47.852440Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:47.994408Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:47.998044Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:11:48.038334Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:48.193276Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1059:2878], serverId# [3:1060:2879], sessionId# [0:0:0] 2026-02-19T08:11:48.193813Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:11:48.194008Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-02-19T08:11:48.205080Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.211834Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1067:2885], serverId# [3:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:48.213231Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:48.225878Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:48.225980Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.226241Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:48.226289Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-02-19T08:11:48.226583Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.226634Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.226686Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:48.226755Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.226829Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1067:2885], serverId# [3:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:48.227937Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:48.228270Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:48.228486Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.228534Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:48.228578Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-02-19T08:11:48.228810Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:48.228871Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.229502Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-02-19T08:11:48.230331Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:11:48.230468Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-02-19T08:11:48.230523Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-02-19T08:11:48.303570Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:48.303640Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715661, at: 72075186224037888 2026-02-19T08:11:48.303804Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.303846Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:48.303892Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-02-19T08:11:48.304020Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.304079Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.304129Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:38.736113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:38.856553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:38.877935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:38.878028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:38.878673Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:39.194042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:39.194183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:39.274412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488695611720 != 1771488695611724 2026-02-19T08:11:39.281635Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:39.330879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:39.440210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:39.727787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:39.741143Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:39.851639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:39.899934Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:39.900203Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:39.949146Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:39.949275Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:39.950866Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:39.950942Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:39.951004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:39.951333Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:39.951482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:39.951558Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:39.965125Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:40.008256Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:40.008426Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:40.008537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:40.008574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:40.008606Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:40.008639Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.009071Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:40.009160Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:40.009215Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.009250Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:40.009286Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:40.009330Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.009402Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:40.009527Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:40.010005Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:40.010094Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:40.012049Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:40.023015Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:40.023124Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:40.149011Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:40.154197Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:40.154287Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:40.154588Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.154632Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:40.154681Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:40.154954Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:40.155104Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:40.155817Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.155892Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:40.158318Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:40.158754Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:40.160846Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:40.160898Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.161212Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:40.161294Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.162700Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.162744Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:40.162787Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:40.162864Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:40.162926Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:40.163013Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.168439Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:40.172238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:40.172336Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:40.173445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... ain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2026-02-19T08:11:48.211474Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2026-02-19T08:11:48.211532Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.254677Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1460:3228] 2026-02-19T08:11:48.254990Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:48.269548Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:48.269770Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:48.271325Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2026-02-19T08:11:48.271412Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037894 2026-02-19T08:11:48.271467Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037894 2026-02-19T08:11:48.271804Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:48.271943Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:48.272014Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037894 persisting started state actor id [2:1475:3228] in generation 1 2026-02-19T08:11:48.295254Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:48.297834Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037894 2026-02-19T08:11:48.297976Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:48.298057Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037894, actorId: [2:1477:3238] 2026-02-19T08:11:48.298089Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037894 2026-02-19T08:11:48.298120Z node 2 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2026-02-19T08:11:48.298153Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:11:48.298576Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037894 2026-02-19T08:11:48.298693Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2026-02-19T08:11:48.298797Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 
2026-02-19T08:11:48.298831Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.298864Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037894 TxInFly 0 2026-02-19T08:11:48.298897Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-02-19T08:11:48.298981Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037894, clientId# [2:1458:3227], serverId# [2:1462:3229], sessionId# [0:0:0] 2026-02-19T08:11:48.299334Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:11:48.299566Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2026-02-19T08:11:48.299637Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2026-02-19T08:11:48.300122Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2026-02-19T08:11:48.310933Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:11:48.311049Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037894 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:48.447882Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037894, clientId# [2:1483:3244], serverId# [2:1485:3246], sessionId# [0:0:0] 2026-02-19T08:11:48.448442Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715663 at step 5000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 5000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2026-02-19T08:11:48.448485Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:11:48.448958Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-02-19T08:11:48.448996Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:48.449032Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [5000:281474976715663] in PlanQueue unit at 72075186224037894 2026-02-19T08:11:48.449281Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037894 loaded tx from db 5000:281474976715663 keys extracted: 0 2026-02-19T08:11:48.449392Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:48.450137Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-02-19T08:11:48.450209Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 44] schema version# 1 2026-02-19T08:11:48.450543Z node 2 :TX_DATASHARD INFO: 
datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:48.450888Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.452531Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 4500 2026-02-19T08:11:48.452577Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:11:48.453501Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 5000} 2026-02-19T08:11:48.453578Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-02-19T08:11:48.456085Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-02-19T08:11:48.456131Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037894 2026-02-19T08:11:48.456173Z node 2 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037894 2026-02-19T08:11:48.456246Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [5000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:48.456298Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2026-02-19T08:11:48.456368Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:11:48.457039Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2026-02-19T08:11:48.457139Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:11:48.457237Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-02-19T08:11:48.457296Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2026-02-19T08:11:48.457351Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2026-02-19T08:11:48.457609Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2026-02-19T08:11:48.457687Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.458396Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 
72075186224037894 coordinator 72057594046316545 last step 0 next step 5000 2026-02-19T08:11:48.461416Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2026-02-19T08:11:48.461481Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-02-19T08:11:48.468182Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037894, clientId# [2:1513:3268], serverId# [2:1514:3269], sessionId# [0:0:0] 2026-02-19T08:11:48.468398Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1513:3268], serverId# [2:1514:3269], sessionId# [0:0:0] 2026-02-19T08:11:48.469924Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037894, clientId# [2:1518:3273], serverId# [2:1519:3274], sessionId# [0:0:0] 2026-02-19T08:11:48.470091Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1518:3273], serverId# [2:1519:3274], sessionId# [0:0:0] 2026-02-19T08:11:48.472112Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037894, clientId# [2:1523:3278], serverId# [2:1524:3279], sessionId# [0:0:0] 2026-02-19T08:11:48.472397Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1523:3278], serverId# [2:1524:3279], sessionId# [0:0:0] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:40.842318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:40.964288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:40.983003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:40.983075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:40.983522Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:41.265024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:41.265144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:41.361444Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488698066248 != 1771488698066252 2026-02-19T08:11:41.371557Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:41.423163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:41.556308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:41.880244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:41.894396Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:42.008804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:42.051557Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:42.051829Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:42.099033Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:42.099154Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:42.100731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:42.100828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:42.100908Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:42.101266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:42.101395Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:42.101479Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:42.112140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:42.148945Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:42.149132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:42.149244Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:42.149287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:42.149323Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:42.149356Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:42.149902Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:42.150011Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:42.150078Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:42.150122Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:42.150174Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:42.150215Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:42.150279Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:42.150393Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:42.150642Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:42.150741Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:42.152420Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:42.166286Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:42.166390Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:42.296161Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:42.300719Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:42.300804Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:42.301073Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:42.301115Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:42.301160Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:42.301448Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:42.301588Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:42.302342Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:42.302445Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:42.304383Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:42.304873Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:42.309399Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:42.309496Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:42.309850Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:42.309942Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:42.313064Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:42.313113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:42.313154Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:42.313238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:42.313289Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:42.313390Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:42.319549Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:42.321194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:42.321261Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:42.322024Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:48.664143Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:48.664366Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:48.664508Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:48.664897Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.664962Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:48.665525Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:48.665958Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.667609Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:48.667659Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.668018Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:48.668086Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.669692Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.669753Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:48.669802Z node 2 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:48.669860Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:48.669913Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:48.669983Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.670537Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.672176Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2026-02-19T08:11:48.672243Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:48.673035Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:48.681462Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.681576Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.681666Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.683020Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.683227Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.687156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:48.691708Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.838997Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.842281Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:11:48.880918Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:48.977024Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:1059:2878], serverId# [2:1060:2879], sessionId# [0:0:0] 2026-02-19T08:11:48.977387Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:11:48.977574Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-02-19T08:11:48.990234Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.994291Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:1067:2885], serverId# [2:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:48.995129Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:49.006273Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:49.006356Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:49.006658Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:49.006698Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-02-19T08:11:49.006820Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1067:2885], serverId# [2:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:49.006915Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:49.006958Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:49.007002Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:49.007052Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:49.007869Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:49.008172Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:49.008345Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:49.008389Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:49.008430Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-02-19T08:11:49.008631Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:49.008685Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:49.009253Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-02-19T08:11:49.009479Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:11:49.009594Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-02-19T08:11:49.009644Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-02-19T08:11:49.040128Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:49.040194Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715661, at: 72075186224037888 2026-02-19T08:11:49.040346Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:49.040381Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:49.040418Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-02-19T08:11:49.040529Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:49.040585Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:49.040625Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous >> ReadOnlyVDisk::TestGetWithMustRestoreFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:11:35.240149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:35.390025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:35.409943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:35.410043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:35.410443Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:35.739705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:35.739834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:35.855654Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488691393418 != 1771488691393422 2026-02-19T08:11:35.864786Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:35.918426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:36.035759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:36.360789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:36.377251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:36.490282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:36.530637Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:36.530961Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:36.584798Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:36.584951Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:36.586777Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:36.586872Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:36.586958Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:36.587374Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:36.587552Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:36.587652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:36.599988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:36.646058Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:36.646319Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:36.646514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:36.646574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:36.646618Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:36.646659Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:36.647338Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:36.647492Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:36.647582Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:36.647645Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:36.647700Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:36.647752Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:36.647831Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:36.647973Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:36.648250Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:36.648360Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:36.653289Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:36.664725Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:36.664849Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:36.804108Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:36.820896Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:36.820997Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:36.821319Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:36.821367Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:36.821419Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:36.821840Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:36.822015Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:36.822751Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:36.822833Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:36.824841Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:36.825294Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:36.827543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:36.827592Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:36.827944Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:36.828038Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:36.829515Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:36.829558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:36.829632Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:36.829737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:36.829798Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:36.829890Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:36.834204Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:36.836366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:36.836438Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:36.837201Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:48.797527Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:48.801980Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:48.802195Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:48.802492Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.802567Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:48.803088Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:48.803555Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.810126Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:48.810206Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.811798Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:48.811905Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.812934Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.812986Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:48.813045Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:48.813112Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:48.813174Z node 3 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:48.813283Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.824041Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.834572Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2026-02-19T08:11:48.834697Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:48.835748Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:48.854155Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.854262Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.854685Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.855353Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.855586Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:48.860073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:48.867559Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.991653Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.997604Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:11:49.034345Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:49.120402Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1059:2878], serverId# [3:1060:2879], sessionId# [0:0:0] 2026-02-19T08:11:49.120856Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:11:49.121041Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-02-19T08:11:49.132058Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:49.136110Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1067:2885], serverId# [3:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:49.137066Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:49.150314Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:49.150379Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:49.150601Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:49.150644Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-02-19T08:11:49.150905Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:49.150952Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:49.150995Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:49.151043Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:49.151114Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1067:2885], serverId# [3:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:49.151963Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:49.152338Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:49.152561Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:49.152608Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:49.152658Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-02-19T08:11:49.152875Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:49.152943Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:49.153517Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-02-19T08:11:49.157982Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:11:49.158167Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-02-19T08:11:49.158237Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-02-19T08:11:49.160381Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:49.160438Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715661, at: 72075186224037888 2026-02-19T08:11:49.160590Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:49.160629Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:49.160671Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-02-19T08:11:49.160799Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:49.160853Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:49.160901Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser >> ReadOnlyVDisk::TestWrites [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> 
EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:42.362060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:42.476626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:42.494661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:42.494727Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:42.495194Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:42.844110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:42.844246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:42.924918Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488699430431 != 1771488699430435 2026-02-19T08:11:42.934247Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:42.980453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:43.067902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:43.425375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:43.442293Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:43.564178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:43.617241Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:43.617531Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:43.768184Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:43.768320Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:43.774222Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:43.774312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:43.774390Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:43.774741Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:43.774892Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:43.774990Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:43.785746Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:43.856578Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:43.857260Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:43.857367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:43.857408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:43.857442Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:43.857475Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:43.857974Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:43.858072Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:43.858142Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:43.858186Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:43.858247Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:43.858284Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:43.858341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:43.858459Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:43.858698Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:43.858794Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:43.860833Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:43.874248Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:43.874353Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:44.011927Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:44.016643Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:44.016729Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:44.016984Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:44.017028Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:44.017077Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:44.017355Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:44.017518Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:44.030713Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:44.030824Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:44.032876Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:44.033366Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:44.039996Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:44.040064Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:44.040427Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:44.040540Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:44.042162Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:44.042208Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:44.042252Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:44.042336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:44.042413Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:44.042538Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:44.047029Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:44.048831Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:44.048928Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:44.049734Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:50.514003Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:50.514250Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:50.514377Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:50.514833Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:50.516931Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:50.517522Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:50.517989Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:50.519893Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:50.519951Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:50.524680Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:50.524789Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:50.526495Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:50.526547Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:50.526593Z node 2 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:50.526656Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:50.526707Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:50.526785Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:50.527371Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:50.529091Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2026-02-19T08:11:50.529162Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:50.529991Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:50.540559Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.540658Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.540726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.541631Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.542881Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.548147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:50.555219Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:50.686442Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:50.689379Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:11:50.724937Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:50.851961Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:1059:2878], serverId# [2:1060:2879], sessionId# [0:0:0] 2026-02-19T08:11:50.852465Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:11:50.852642Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-02-19T08:11:50.864177Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:50.869551Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:1067:2885], serverId# [2:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:50.871651Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:50.883897Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:50.883973Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:50.884332Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:50.884394Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-02-19T08:11:50.884523Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1067:2885], serverId# [2:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:50.884609Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:50.884661Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:50.884708Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:50.884761Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:50.885559Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:50.885925Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:50.886071Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:50.886129Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:50.886179Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-02-19T08:11:50.886391Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:50.886443Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:50.886973Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-02-19T08:11:50.887189Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:11:50.887294Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-02-19T08:11:50.887338Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-02-19T08:11:50.889527Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:50.889591Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715661, at: 72075186224037888 2026-02-19T08:11:50.889809Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:50.889844Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:50.889881Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-02-19T08:11:50.890018Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:50.890067Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:50.890119Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 15134745311414383238 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-02-19T08:11:48.474271Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] TEvPutResult: TEvPutResult {Id# 
[1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-02-19T08:11:48.479687Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-02-19T08:11:48.485414Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-02-19T08:11:48.488393Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2026-02-19T08:11:48.497251Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2026-02-19T08:11:48.499942Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2026-02-19T08:11:48.502135Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2026-02-19T08:11:48.504160Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult 
{Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2026-02-19T08:11:49.664299Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] 2026-02-19T08:11:49.664428Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] 2026-02-19T08:11:49.664555Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] 2026-02-19T08:11:49.665444Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [3939de1e50bbf9dc] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-02-19T08:11:49.667068Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.45 QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 Subrequests# [{ BlobId# [1:1:11:0:0:32768:6] }] } TEvVPut{ TimestampMs# 0.451 QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 Subrequests# [{ BlobId# [1:1:11:0:0:32768:3] }] } TEvVPut{ TimestampMs# 0.451 QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 Subrequests# [{ BlobId# [1:1:11:0:0:32768:2] }] } TEvVPut{ TimestampMs# 0.452 QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 Subrequests# [{ BlobId# [1:1:11:0:0:32768:5] }] } TEvVPut{ TimestampMs# 0.452 QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 Subrequests# [{ BlobId# [1:1:11:0:0:32768:1] }] } 
TEvVPut{ TimestampMs# 0.452 QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 Subrequests# [{ BlobId# [1:1:11:0:0:32768:4] }] } TEvVPutResult{ TimestampMs# 1.46 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.529 QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 Subrequests# [{ BlobId# [1:1:11:0:0:32768:4] }] } TEvVPutResult{ TimestampMs# 1.685 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.721 QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 Subrequests# [{ BlobId# [1:1:11:0:0:32768:6] }] } TEvVPutResult{ TimestampMs# 1.747 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2026-02-19T08:11:49.669055Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] 2026-02-19T08:11:49.669220Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] 2026-02-19T08:11:49.670394Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2026-02-19T08:11:49.672246Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5331:705] 2026-02-19T08:11:49.673099Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] 2026-02-19T08:11:49.674010Z 3 
00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 V ... ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2026-02-19T08:11:52.303209Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] 2026-02-19T08:11:52.303365Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only 
Sender# [1:5338:712] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2026-02-19T08:11:52.307346Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] 2026-02-19T08:11:52.308980Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2026-02-19T08:11:52.314192Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2026-02-19T08:11:52.317408Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] 2026-02-19T08:11:52.317512Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2026-02-19T08:11:52.320727Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] 2026-02-19T08:11:52.320829Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2026-02-19T08:11:52.324302Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] 2026-02-19T08:11:52.324427Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2026-02-19T08:11:52.327812Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] 2026-02-19T08:11:52.328115Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2026-02-19T08:11:52.332177Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] 2026-02-19T08:11:52.332311Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2026-02-19T08:11:52.335627Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5345:719] 2026-02-19T08:11:52.335811Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5338:712] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key 
[1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] >> KqpPrefixedVectorIndexes::CosineDistanceWithPkSuffix-Nullable-Covered >> ReadOnlyVDisk::TestDiscover [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:11:38.266261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:38.381501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:38.404940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:38.405023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:38.405534Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:38.725272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:38.725405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:38.834295Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488695048748 != 1771488695048752 2026-02-19T08:11:38.841982Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:38.890298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:38.996807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:39.301890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:39.318381Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:39.435301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:39.510314Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:914:2786] 2026-02-19T08:11:39.510589Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:39.570194Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:39.570316Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:39.572077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:39.572164Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:39.572240Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:39.572593Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:39.572860Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:39.572928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:948:2786] in generation 1 2026-02-19T08:11:39.573302Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:918:2789] 2026-02-19T08:11:39.573480Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 
2026-02-19T08:11:39.583346Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:921:2792] 2026-02-19T08:11:39.583569Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:39.592272Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:39.592473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:39.593835Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:11:39.593968Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:11:39.594014Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:11:39.594294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:39.594427Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:39.594486Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:962:2789] in generation 1 2026-02-19T08:11:39.594763Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:39.594834Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:39.596090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-02-19T08:11:39.596147Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037890 2026-02-19T08:11:39.596201Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037890 2026-02-19T08:11:39.596471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:39.596565Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:39.596620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037890 persisting started state actor id [1:963:2792] in generation 1 2026-02-19T08:11:39.608006Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:39.648254Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:39.648541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:39.648707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:967:2818] 2026-02-19T08:11:39.648782Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:39.648816Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:39.648853Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.649504Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:39.649578Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 
2026-02-19T08:11:39.649665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:39.649743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:968:2819] 2026-02-19T08:11:39.649771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:11:39.649795Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:11:39.649822Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:11:39.649919Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:39.649942Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-02-19T08:11:39.649999Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:39.650059Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037890, actorId: [1:969:2820] 2026-02-19T08:11:39.650089Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037890 2026-02-19T08:11:39.650118Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-02-19T08:11:39.650138Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:11:39.650273Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:39.650368Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:39.650693Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:39.650734Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:39.650783Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:39.650867Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:39.651126Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:904:2781], serverId# [1:915:2787], sessionId# [0:0:0] 2026-02-19T08:11:39.651169Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:11:39.651219Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:11:39.651244Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-02-19T08:11:39.651285Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-02-19T08:11:39.652303Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:39.652598Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186 ... :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:51.674154Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:51.674242Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:51.675374Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:51.675427Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:51.675507Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:51.675597Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:51.675663Z node 3 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:51.675763Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:51.678100Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:51.679979Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:51.680072Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:51.680973Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:51.691529Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.691677Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.692161Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.692932Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.693142Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.701528Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:51.709383Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:51.831550Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:51.835969Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:11:51.872437Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:51.956432Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1059:2878], serverId# [3:1060:2879], sessionId# [0:0:0] 2026-02-19T08:11:51.956907Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:11:51.957111Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=3 2026-02-19T08:11:51.968266Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.186491Z node 3 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2729: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] { items { uint64_value: 0 } } 2026-02-19T08:11:52.193313Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1098:2910], serverId# [3:1099:2911], sessionId# [0:0:0] 2026-02-19T08:11:52.194422Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:52.210387Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:52.210475Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.210539Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2578: Waiting for PlanStep# 2501 from mediator time cast 2026-02-19T08:11:52.211236Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3816: Notified by mediator time cast with PlanStep# 2501 at tablet 72075186224037888 2026-02-19T08:11:52.211305Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.211582Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:52.211629Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 4, at: 72075186224037888 2026-02-19T08:11:52.211793Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1098:2910], serverId# [3:1099:2911], sessionId# [0:0:0] 2026-02-19T08:11:52.211893Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.211943Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:52.211990Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:52.212049Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.290772Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:6] at 72075186224037888 
2026-02-19T08:11:52.290954Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2026-02-19T08:11:52.301976Z node 3 :TX_DATASHARD INFO: datashard_write_operation.cpp:808: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2026-02-19T08:11:52.302298Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-02-19T08:11:52.302515Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-02-19T08:11:52.302593Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.302910Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:981: SelfId: [3:1119:2884], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:1066:2884]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:1119:2884].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-02-19T08:11:52.303516Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:1113:2884], SessionActorId: [3:1066:2884], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:1066:2884]. 2026-02-19T08:11:52.303881Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=N2JjYTg2OTktNDQ0Y2YwNmQtMjUxYWI0ZGMtYTcxYjM4ZGY=, ActorId: [3:1066:2884], ActorState: ExecuteState, LegacyTraceId: 01khtf95gm1zae41h1xjsh0s12, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:1114:2884] from: [3:1113:2884] trace_id# 2026-02-19T08:11:52.304035Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:1114:2884] TxId: 281474976715662. Ctx: { TraceId: 01khtf95gm1zae41h1xjsh0s12, Database: , SessionId: ydb://session/3?node_id=3&id=N2JjYTg2OTktNDQ0Y2YwNmQtMjUxYWI0ZGMtYTcxYjM4ZGY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# [ {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 } {
: Error: Operation is aborting because locks are not valid, code: 2001 } ] trace_id# 2026-02-19T08:11:52.304540Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=N2JjYTg2OTktNDQ0Y2YwNmQtMjUxYWI0ZGMtYTcxYjM4ZGY=, ActorId: [3:1066:2884], ActorState: ExecuteState, LegacyTraceId: 01khtf95gm1zae41h1xjsh0s12, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 }{ message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } trace_id# 2026-02-19T08:11:52.305448Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:7] at 72075186224037888 2026-02-19T08:11:52.305514Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:561: Skip empty write operation for [0:7] at 72075186224037888 2026-02-19T08:11:52.305734Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:43.831900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:44.059124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:44.089154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:44.089225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:44.090156Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:44.389318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:44.389499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:44.491996Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488700832389 != 1771488700832393 2026-02-19T08:11:44.507242Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:44.559244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:44.671789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:45.046670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:45.061372Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:45.167740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:45.220406Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:45.220716Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:45.272051Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:45.272230Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:45.274230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:45.274324Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:45.274405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:45.274845Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:45.275019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:45.275119Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:45.285993Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:45.317185Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:45.317457Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:45.317601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:45.317675Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:45.317734Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:45.317774Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:45.318386Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:45.318513Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:45.318599Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:45.318648Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:45.318717Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:45.318774Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:45.318849Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:45.318998Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:45.319357Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:45.319485Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:45.321839Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:45.333065Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:45.333220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:45.468565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:45.476727Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:45.476838Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:45.477250Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:45.477315Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:45.477382Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:45.477876Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:45.478112Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:45.479144Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:45.479260Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:45.482114Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:45.482738Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:45.485518Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:45.485578Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:45.486077Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:45.486211Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:45.488107Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:45.488159Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:45.488208Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:45.488313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:45.488389Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:45.488516Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:45.494735Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:45.497157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:45.497251Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:45.498270Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:52.008421Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:52.008731Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:52.011682Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:52.012344Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.012425Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:52.012963Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:52.013408Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:52.015341Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:52.015404Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.015806Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:52.015888Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.017471Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.017524Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:52.017578Z node 2 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:52.017675Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:52.017792Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:52.017883Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.018499Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:52.020320Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2026-02-19T08:11:52.020413Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:52.021244Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:52.031009Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.031135Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.031205Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.031966Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.032138Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.035834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:52.044449Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:52.170698Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:52.174529Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:11:52.212199Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:52.331015Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:1059:2878], serverId# [2:1060:2879], sessionId# [0:0:0] 2026-02-19T08:11:52.331463Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:11:52.331643Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-02-19T08:11:52.342682Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.346949Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:1067:2885], serverId# [2:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:52.347720Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:52.358809Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:52.358895Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.359242Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:52.359313Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-02-19T08:11:52.359456Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1067:2885], serverId# [2:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:52.359559Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.359604Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:52.359655Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:52.359714Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.360679Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:52.361010Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:52.361197Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.361244Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:52.361291Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-02-19T08:11:52.361507Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:52.361571Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.362235Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-02-19T08:11:52.362423Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:11:52.362523Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-02-19T08:11:52.362560Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-02-19T08:11:52.407618Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:52.407691Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715661, at: 72075186224037888 2026-02-19T08:11:52.407841Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.407876Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:52.407912Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-02-19T08:11:52.408030Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:52.408090Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.408133Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:38.779051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:38.909289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:38.929445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:38.929533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:38.930169Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:39.243740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:39.243881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:39.331136Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488695580909 != 1771488695580913 2026-02-19T08:11:39.341174Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:39.394931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:39.524748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:39.824737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:39.838185Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:39.950027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:39.987861Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:39.988155Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:40.037308Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:40.037430Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:40.039115Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:40.039195Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:40.039276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:40.039629Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:40.039779Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:40.039868Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:40.050553Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:40.104663Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:40.104875Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:40.104989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:40.105031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:40.105066Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:40.105102Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.105609Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:40.105773Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:40.105863Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.105914Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:40.105955Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:40.105995Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.106062Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:40.106194Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:40.106436Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:40.106532Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:40.108615Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:40.119553Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:40.119672Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:40.247868Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:40.252867Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:40.252954Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:40.253257Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.253306Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:40.253351Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:40.253662Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:40.254116Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:40.254774Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.254867Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:40.256884Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:40.257290Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:40.259692Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:40.259741Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.260069Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:40.260146Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.261692Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.261747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:40.261814Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:40.261882Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:40.261944Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:40.262029Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.266386Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:40.268469Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:40.268544Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:40.269335Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:52.240971Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:52.241275Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:52.241406Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:52.241592Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.241648Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:52.242183Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:52.242612Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:52.248045Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:52.248128Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.252119Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:52.252485Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.254048Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.254106Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:52.254162Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:52.254242Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:52.254561Z node 3 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:52.254660Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.258631Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:52.260769Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2026-02-19T08:11:52.260872Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:52.262477Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:52.275099Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.281728Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.282495Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.283543Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.283845Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.289615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:52.297322Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:52.423093Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:52.427255Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:11:52.463863Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:52.595955Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1059:2878], serverId# [3:1060:2879], sessionId# [0:0:0] 2026-02-19T08:11:52.596593Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:11:52.596805Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-02-19T08:11:52.608883Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.616818Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1067:2885], serverId# [3:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:52.618204Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:52.630576Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:52.630683Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.630986Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:52.631041Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-02-19T08:11:52.631434Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.631499Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:52.631556Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:52.631631Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.631746Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1067:2885], serverId# [3:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:52.632959Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:52.633410Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:52.633685Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.634173Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:52.634242Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-02-19T08:11:52.634535Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:52.634616Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.635336Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-02-19T08:11:52.635655Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:11:52.635768Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-02-19T08:11:52.635811Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-02-19T08:11:52.637344Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:52.637386Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715661, at: 72075186224037888 2026-02-19T08:11:52.637575Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.637618Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:52.637675Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-02-19T08:11:52.637843Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:52.637915Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.637977Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |90.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 5170269847196235416 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 
131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-02-19T08:11:50.046844Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-02-19T08:11:50.379225Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:705] 2026-02-19T08:11:50.380631Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5341:712] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2026-02-19T08:11:50.654227Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5348:719] 2026-02-19T08:11:50.655342Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5334:705] 2026-02-19T08:11:50.656026Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5341:712] 2026-02-19T08:11:50.656322Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [f0f987760dffcba5] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-02-19T08:11:50.656655Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.537 QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 Subrequests# [{ BlobId# [1:1:5:0:0:32768:6] }] } TEvVPut{ TimestampMs# 0.538 QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 Subrequests# [{ BlobId# [1:1:5:0:0:32768:5] }] } TEvVPut{ TimestampMs# 0.539 QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 
Subrequests# [{ BlobId# [1:1:5:0:0:32768:4] }] } TEvVPut{ TimestampMs# 0.539 QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 Subrequests# [{ BlobId# [1:1:5:0:0:32768:3] }] } TEvVPut{ TimestampMs# 0.54 QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 Subrequests# [{ BlobId# [1:1:5:0:0:32768:2] }] } TEvVPut{ TimestampMs# 0.54 QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 Subrequests# [{ BlobId# [1:1:5:0:0:32768:1] }] } TEvVPutResult{ TimestampMs# 1.769 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.849 QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 Subrequests# [{ BlobId# [1:1:5:0:0:32768:1] }] } TEvVPutResult{ TimestampMs# 2.493 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 2.54 QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 Subrequests# [{ BlobId# [1:1:5:0:0:32768:1] }] } TEvVPutResult{ TimestampMs# 2.738 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 2.764 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 2.781 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 2.803 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 2.836 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 2.995 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === 
Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting 
VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:39.296943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:39.416586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:39.437344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:39.437471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:39.438020Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:39.751415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:39.751569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:39.839654Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488696122025 != 1771488696122029 2026-02-19T08:11:39.849395Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:39.897916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:39.999286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:40.319174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:40.333519Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:40.443916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:40.480756Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:40.481061Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:40.535536Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:40.535666Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:40.537334Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:40.537433Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:40.537522Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:40.537897Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:40.538049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:40.538134Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:40.550304Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:40.596553Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:40.596769Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:40.596959Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:40.597004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:40.597037Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:40.597073Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.597548Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:40.597674Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:40.597783Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.597821Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:40.597865Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:40.597917Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.597995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:40.598120Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:40.598394Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:40.598492Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:40.600629Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:40.613820Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:40.613937Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:40.743230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:40.750616Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:40.750719Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:40.751103Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.751152Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:40.751238Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:40.751538Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:40.751709Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:40.752469Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:40.752584Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:40.754980Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:40.755460Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:40.758514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:40.758575Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.759030Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:40.759114Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.760684Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:40.760737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:40.760790Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:40.760869Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:40.760923Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:40.761022Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:40.770357Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:40.772435Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:40.772521Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:40.773337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:52.849074Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:52.849317Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:52.849458Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:52.849642Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:52.849743Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:52.850244Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:52.851550Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:52.853786Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:52.853864Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.855265Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:52.855350Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.857148Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:52.857199Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:52.857257Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:52.857338Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:52.857401Z node 3 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:52.857487Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:52.859732Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:52.861451Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2026-02-19T08:11:52.861517Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:52.863713Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:52.872851Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.872943Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.873280Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.876072Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.876298Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:52.879969Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:52.888511Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:53.023880Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:53.031124Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:11:53.075827Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:53.271805Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1059:2878], serverId# [3:1060:2879], sessionId# [0:0:0] 2026-02-19T08:11:53.272369Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:11:53.272580Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-02-19T08:11:53.285984Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:53.290973Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1067:2885], serverId# [3:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:53.293258Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-02-19T08:11:53.305061Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-02-19T08:11:53.305163Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:53.305442Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:53.305507Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-02-19T08:11:53.305931Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:53.306006Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:53.306064Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:53.306142Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:53.306261Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1067:2885], serverId# [3:1068:2886], sessionId# [0:0:0] 2026-02-19T08:11:53.307746Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:53.308237Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:53.308471Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:53.308525Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:53.308578Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-02-19T08:11:53.308859Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:53.308937Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:53.309857Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-02-19T08:11:53.310258Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:11:53.310410Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-02-19T08:11:53.310474Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-02-19T08:11:53.344079Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:11:53.344154Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715661, at: 72075186224037888 2026-02-19T08:11:53.344324Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:53.344363Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:53.344401Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-02-19T08:11:53.344532Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:53.344597Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:53.344655Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> KqpFulltextIndexes::SelectWithRelevanceContainsAndNgramWildcard+Edge+Covered [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexInsertWithOverlap-Returning-Covered >> KqpPrefixedVectorIndexes::PrefixedVectorIndexInsert+Returning-Covered |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |90.1%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> KqpPrefixedVectorIndexes::PrefixedVectorEmptyIndexedTableInsertWithOverlap-Covered >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteOn-Covered >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:46.397355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:46.559368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:46.583440Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:46.583522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:46.584115Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:46.975193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:46.975317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:47.066554Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488702837683 != 1771488702837687 2026-02-19T08:11:47.080334Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:47.127160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:47.256276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:47.728251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:47.746455Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:47.866896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:47.906849Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:11:47.907131Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:47.971393Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:47.971560Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:47.973396Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:47.973491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:47.973570Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:47.974608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:47.974786Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:47.974891Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:11:47.987537Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:48.034253Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:11:48.034446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:48.034550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:11:48.034598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:48.034639Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:48.034686Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.035198Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:48.035309Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:48.035383Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.035420Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.035461Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:48.035502Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.035594Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:48.035722Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:48.035979Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:48.036074Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:48.038144Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.049292Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:48.049414Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:48.181240Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:48.186777Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:48.186885Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:11:48.187235Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.187293Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:48.187361Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:48.187624Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:48.187767Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:48.188467Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.188568Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:48.190721Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:48.191278Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.193603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:48.193672Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.194117Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:48.194216Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.195766Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.195809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:48.195846Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:48.195917Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:48.195975Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:48.196086Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.200905Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:48.203094Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:48.203182Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:48.204134Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... 6-02-19T08:11:53.163122Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:53.163189Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:53.497410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:53.497575Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:53.531290Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771488709582924 != 1771488709582928 2026-02-19T08:11:53.535219Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:53.585033Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:53.661239Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:54.011295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:54.030352Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:54.154278Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:54.181849Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:892:2770] 2026-02-19T08:11:54.182129Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:54.246010Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:54.246177Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:54.248024Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:54.248125Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:54.248191Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 
72075186224037888 2026-02-19T08:11:54.248578Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:54.248753Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:54.248847Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [2:907:2770] in generation 1 2026-02-19T08:11:54.259782Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:54.259914Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:54.260068Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:54.260191Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [2:909:2780] 2026-02-19T08:11:54.260240Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:54.260301Z node 2 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:54.260353Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:54.260944Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:54.261074Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:54.261181Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:54.261236Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:54.261290Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:54.261344Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:54.261888Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:889:2768], serverId# [2:894:2771], sessionId# [0:0:0] 2026-02-19T08:11:54.262051Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:54.262332Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:54.262429Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:54.264446Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:54.277269Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:54.277421Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state 
WaitScheme 2026-02-19T08:11:54.404265Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:923:2788], serverId# [2:925:2790], sessionId# [0:0:0] 2026-02-19T08:11:54.406658Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:54.406731Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:54.408441Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:54.408527Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:54.408586Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:54.409016Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:11:54.409193Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:54.410082Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:54.410167Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:54.411053Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:54.411537Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:54.413612Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:11:54.413681Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:54.414142Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:11:54.414229Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:54.419735Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:54.419833Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:54.419918Z node 2 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:54.420008Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:54.420082Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 
72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:54.420206Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:54.421093Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:54.423483Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:11:54.423578Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:11:54.424596Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:11:54.436454Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:959:2816], serverId# [2:960:2817], sessionId# [0:0:0] 2026-02-19T08:11:54.436621Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2026-02-19T08:11:54.436880Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:959:2816], serverId# [2:960:2817], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 7678331943958203044 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-02-19T08:11:52.864428Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-02-19T08:11:52.871257Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-02-19T08:11:52.877168Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-02-19T08:11:52.880826Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2026-02-19T08:11:52.890457Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2026-02-19T08:11:52.893613Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2026-02-19T08:11:52.896858Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2026-02-19T08:11:52.900047Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only 
=== Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2026-02-19T08:11:55.498555Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:55.498703Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:55.498847Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:55.499935Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [71882ed4cddc7395] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-02-19T08:11:55.501833Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.564 QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 Subrequests# [{ BlobId# [1:1:11:0:0:32768:6] }] } TEvVPut{ TimestampMs# 0.565 QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 Subrequests# [{ BlobId# [1:1:11:0:0:32768:3] }] } TEvVPut{ TimestampMs# 0.565 QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 Subrequests# [{ BlobId# [1:1:11:0:0:32768:2] }] } TEvVPut{ TimestampMs# 0.566 QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 Subrequests# [{ BlobId# [1:1:11:0:0:32768:5] }] } TEvVPut{ TimestampMs# 0.566 QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 Subrequests# [{ BlobId# [1:1:11:0:0:32768:1] }] } TEvVPut{ TimestampMs# 0.566 QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 Subrequests# [{ BlobId# [1:1:11:0:0:32768:4] }] } TEvVPutResult{ TimestampMs# 1.751 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.839 QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 Subrequests# [{ BlobId# [1:1:11:0:0:32768:4] }] } TEvVPutResult{ TimestampMs# 2.031 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 2.089 QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 Subrequests# [{ BlobId# [1:1:11:0:0:32768:6] }] } TEvVPutResult{ TimestampMs# 2.129 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy 
saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2026-02-19T08:11:55.503930Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:55.504305Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:55.505312Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2026-02-19T08:11:55.507478Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:55.508400Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:55.509398Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 ... 
d-only Sender# [1:5332:705] 2026-02-19T08:11:55.524585Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:55.524725Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2026-02-19T08:11:55.527944Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:55.528240Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:55.528354Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2026-02-19T08:11:55.531445Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:55.531600Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:55.531730Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ 
"VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2026-02-19T08:11:55.539102Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5333:706] 2026-02-19T08:11:55.539340Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] 2026-02-19T08:11:55.539408Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] 2026-02-19T08:11:55.540175Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [b4d2a72d113f6a5b] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2026-02-19T08:11:55.540720Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: {BPG72@dsproxy_get.cpp:427} Query history GroupId# 2181038080 HandleClass# FastRead History# THistory { Entries# [ TEvVGet{ TimestampMs# 0.645 QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 Subrequests# [{ BlobId# [1:1:11:0:0:32768:4] Shift# 0 Size# 8192 }] } TEvVGet{ TimestampMs# 0.646 QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 Subrequests# [{ BlobId# [1:1:11:0:0:32768:5] Shift# 0 Size# 8192 }] } TEvVGet{ TimestampMs# 0.647 QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 Subrequests# [{ BlobId# [1:1:11:0:0:32768:6] Shift# 0 Size# 8192 }] } TEvVGet{ TimestampMs# 0.648 QueryCount# 6 VDiskId# [82000000:1:0:3:0] NodeId# 4 Subrequests# [{ BlobId# [1:1:11:0:0:32768:1] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:2] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:3] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:4] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:5] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:6] Shift# 0 Size# 8192 }] } TEvVGet{ TimestampMs# 0.648 QueryCount# 6 VDiskId# [82000000:1:0:4:0] NodeId# 5 Subrequests# [{ BlobId# [1:1:11:0:0:32768:1] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:2] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:3] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:4] Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:5] Shift# 0 Size# 
8192 }, { BlobId# [1:1:11:0:0:32768:6] Shift# 0 Size# 8192 }] } TEvVGet{ TimestampMs# 0.648 QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 Subrequests# [{ BlobId# [1:1:11:0:0:32768:1] Shift# 0 Size# 8192 }] } TEvVGet{ TimestampMs# 0.649 QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 Subrequests# [{ BlobId# [1:1:11:0:0:32768:2] Shift# 0 Size# 8192 }] } TEvVGet{ TimestampMs# 0.649 QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 Subrequests# [{ BlobId# [1:1:11:0:0:32768:3] Shift# 0 Size# 8192 }] } TEvVGetResult{ TimestampMs# 3.704 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# OK SubrequestResults# [{ BlobId# [1:1:11:0:0:32768:4] Status# NODATA Shift# 0 Size# 0 }] } TEvVGetResult{ TimestampMs# 4.582 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK SubrequestResults# [{ BlobId# [1:1:11:0:0:32768:1] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:2] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:3] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:4] Status# OK Shift# 0 Size# 8192 }, { BlobId# [1:1:11:0:0:32768:5] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:6] Status# NODATA Shift# 0 Size# 0 }] } TEvVGetResult{ TimestampMs# 4.646 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK SubrequestResults# [{ BlobId# [1:1:11:0:0:32768:1] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:2] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:3] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:4] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:5] Status# NODATA Shift# 0 Size# 0 }, { BlobId# [1:1:11:0:0:32768:6] Status# NODATA Shift# 0 Size# 0 }] } TEvVGetResult{ TimestampMs# 4.687 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK SubrequestResults# [{ BlobId# [1:1:11:0:0:32768:1] Status# OK Shift# 0 Size# 8192 }] } TEvVGetResult{ TimestampMs# 4.722 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK SubrequestResults# [{ BlobId# [1:1:11:0:0:32768:2] Status# OK Shift# 0 Size# 8192 }] } TEvVGetResult{ TimestampMs# 5.096 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK SubrequestResults# [{ BlobId# [1:1:11:0:0:32768:3] Status# OK Shift# 0 Size# 8192 }] } TEvVPut{ TimestampMs# 5.132 QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 Subrequests# [{ BlobId# [1:1:11:0:0:32768:4] }] } TEvVPut{ TimestampMs# 5.133 QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 Subrequests# [{ BlobId# [1:1:11:0:0:32768:5] }] } TEvVPut{ TimestampMs# 5.133 QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 Subrequests# [{ BlobId# [1:1:11:0:0:32768:6] }] } TEvVGetResult{ TimestampMs# 5.899 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# OK SubrequestResults# [{ BlobId# [1:1:11:0:0:32768:5] Status# NODATA Shift# 0 Size# 0 }] } TEvVPut{ TimestampMs# 5.911 QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 Subrequests# [{ BlobId# [1:1:11:0:0:32768:5] }] } TEvVGetResult{ TimestampMs# 5.972 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# OK SubrequestResults# [{ BlobId# [1:1:11:0:0:32768:6] Status# NODATA Shift# 0 Size# 0 }] } TEvVPut{ TimestampMs# 5.978 QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 Subrequests# [{ BlobId# [1:1:11:0:0:32768:6] }] } TEvVPutResult{ TimestampMs# 6.019 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } PutAcceleration{ TimestampMs# 6.023 } PutAcceleration{ TimestampMs# 6.058 } TEvVPutResult{ TimestampMs# 6.346 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPutResult{ TimestampMs# 
6.588 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } 2026-02-19T08:11:55.540888Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5340:713] 2026-02-19T08:11:55.540974Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5347:720] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpsertClusterChange+Covered >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdatePkClusterChange-Covered-Overlap >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin >> KqpFulltextIndexes::SelectWithFulltextMatchAndEdgeNgramWildcard [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdateNoChange+Nullable+Covered >> KqpPrefixedVectorIndexes::EmptyPrefixedVectorIndexInsert-Nullable-Covered >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/fulltext/unittest >> KqpFulltextIndexes::SelectWithRelevanceContainsAndNgramWildcard+Edge+Covered [GOOD] Test command err: 2026-02-19T08:11:23.893460Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485961513721142:2255];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:23.893543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:24.466519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:24.466620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:24.495593Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:24.518668Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:24.528901Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for 
subscription [1:7608485961513720924:2081] 1771488683877841 != 1771488683877844 2026-02-19T08:11:24.544988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:24.769909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:24.870345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:24.870364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:24.870370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:24.870447Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:24.892198Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:25.728671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:25.894159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:26.114981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:26.353310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:26.472900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.670913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485982988559277:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.671030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.671600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485982988559287:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.671639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.905853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485961513721142:2255];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:28.910841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:29.167214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.222478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.286621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.350296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.427897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.522886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.629021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.780720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:30.018942Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485991578494760:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:30.019031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:30.019541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485991578494765:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:30.019578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485991578494766:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:30.019708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:30.024211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:30.043127Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608485991578494769:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:11:30.132545Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608485991578494820:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:32.355473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:32.672452Z no ... called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:47.097190Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:47.326423Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:47.429586Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.419100Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486077875428147:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.419187Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.419654Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486077875428157:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.419707Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.499484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.544379Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.601373Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.638664Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.680704Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.728873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.782339Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.873993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:50.985627Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486056400590034:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:50.985704Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:50.996394Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486077875429036:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.996553Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.996980Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486077875429042:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.997001Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486077875429041:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:50.997053Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.001973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:51.027487Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:11:51.028708Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486077875429045:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:11:51.100889Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486082170396395:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:53.212451Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:53.563361Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:53.690859Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715760:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:11:54.702610Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2118: SelfId: [3:7608486095055299307:2637], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01khtf97s07pp5s23dw6j3hqcx. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=MzU2MDQ2N2UtZDZkZDMwMjYtZDhiMzQyNWUtMzY5NjEwYzk=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: No search terms were extracted from the query } 2026-02-19T08:11:54.702661Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [3:7608486095055299307:2637], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01khtf97s07pp5s23dw6j3hqcx. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=MzU2MDQ2N2UtZDZkZDMwMjYtZDhiMzQyNWUtMzY5NjEwYzk=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: BAD_REQUEST DEFAULT_ERROR: {
: Error: No search terms were extracted from the query }. 2026-02-19T08:11:54.703114Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [3:7608486095055299308:2638], TxId: 281474976710677, task: 2. Ctx: { CheckpointId : . TraceId : 01khtf97s07pp5s23dw6j3hqcx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=MzU2MDQ2N2UtZDZkZDMwMjYtZDhiMzQyNWUtMzY5NjEwYzk=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7608486095055299301:2616], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2026-02-19T08:11:54.703479Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=MzU2MDQ2N2UtZDZkZDMwMjYtZDhiMzQyNWUtMzY5NjEwYzk=, ActorId: [3:7608486095055299258:2616], ActorState: ExecuteState, LegacyTraceId: 01khtf97s07pp5s23dw6j3hqcx, Create QueryResponse for error on request, msg: status# BAD_REQUEST issues# { message: "No search terms were extracted from the query" severity: 1 } trace_id# |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:38.581475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:38.675098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:38.692427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:38.692536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:38.692982Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:38.941889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:38.942039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:39.048505Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488695666737 != 1771488695666741 2026-02-19T08:11:39.063491Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:39.116510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:39.210469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:39.542354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:39.558675Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:39.667338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:39.726847Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:914:2786] 2026-02-19T08:11:39.727092Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:39.785993Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:39.786098Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:39.787586Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:39.787652Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:39.787714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:39.788019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:39.788244Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:39.788308Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:948:2786] in generation 1 2026-02-19T08:11:39.788700Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:918:2789] 2026-02-19T08:11:39.788880Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 
2026-02-19T08:11:39.797932Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:921:2792] 2026-02-19T08:11:39.798133Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:39.806551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:39.806729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:39.807967Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:11:39.808075Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:11:39.808116Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:11:39.808383Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:39.808504Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:39.808566Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:962:2789] in generation 1 2026-02-19T08:11:39.808816Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:39.808896Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:39.810173Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-02-19T08:11:39.810229Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037890 2026-02-19T08:11:39.810268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037890 2026-02-19T08:11:39.810574Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:39.810665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:39.810722Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037890 persisting started state actor id [1:963:2792] in generation 1 2026-02-19T08:11:39.821580Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:39.860893Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:39.861079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:39.861184Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:967:2818] 2026-02-19T08:11:39.861228Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:39.861282Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:39.861323Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:39.861859Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:39.861903Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 
2026-02-19T08:11:39.861970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:39.862025Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:968:2819] 2026-02-19T08:11:39.862068Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:11:39.862093Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:11:39.862123Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:11:39.862201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:39.862227Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-02-19T08:11:39.862277Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:39.862321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037890, actorId: [1:969:2820] 2026-02-19T08:11:39.862339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037890 2026-02-19T08:11:39.862358Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-02-19T08:11:39.862378Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:11:39.862521Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:39.862604Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:39.862882Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:39.862924Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:39.862968Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:39.863027Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:39.863225Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:904:2781], serverId# [1:915:2787], sessionId# [0:0:0] 2026-02-19T08:11:39.863267Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:11:39.863337Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:11:39.863379Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-02-19T08:11:39.863439Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-02-19T08:11:39.863879Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:39.864147Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186 ... shard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-02-19T08:11:56.034632Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:56.034816Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-02-19T08:11:56.034912Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-02-19T08:11:56.034953Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-02-19T08:11:56.035049Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2026-02-19T08:11:56.035086Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2026-02-19T08:11:56.035120Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-02-19T08:11:56.035177Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1287:3026] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 1 2026-02-19T08:11:56.035289Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:11:56.035357Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1287:3026] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 1 2026-02-19T08:11:56.035403Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1287:3026] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037889, status# 1 2026-02-19T08:11:56.035441Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:904: [DistEraser] [3:1287:3026] Register plan: txId# 281474976715662, minStep# 2502, maxStep# 32502 2026-02-19T08:11:56.048246Z node 3 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2026-02-19T08:11:56.048429Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:11:56.055403Z node 3 :TX_DATASHARD ERROR: datashard_distributed_erase.cpp:167: [DistEraser] [3:1287:3026] Reply: txId# 281474976715662, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715662, shard# 72075186224037888 2026-02-19T08:11:56.055593Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3671: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2026-02-19T08:11:56.055704Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3671: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2026-02-19T08:11:56.056309Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: 
FullScan complete at 72075186224037889 2026-02-19T08:11:56.056376Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-02-19T08:11:56.057086Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:11:56.057158Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:56.057214Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2026-02-19T08:11:56.057323Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:11:56.057565Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1282:3022], serverId# [3:1283:3023], sessionId# [0:0:0] 2026-02-19T08:11:56.080839Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1299:3037] 2026-02-19T08:11:56.081099Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:56.090342Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:56.091063Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:56.093953Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:56.094077Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:56.094150Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:56.094676Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:56.095814Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:56.095917Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [3:1314:3037] in generation 2 2026-02-19T08:11:56.109875Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:56.110044Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037888 2026-02-19T08:11:56.110216Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:56.110666Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [3:1316:3045] 2026-02-19T08:11:56.110731Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:56.110784Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:56.110820Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:56.111117Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:759: TxInitSchemaDefaults.Execute 2026-02-19T08:11:56.111436Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:771: TxInitSchemaDefaults.Complete 2026-02-19T08:11:56.112819Z node 3 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:56.112946Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:56.113086Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 2501 2026-02-19T08:11:56.113137Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:56.113332Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:56.113483Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:56.113540Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:56.113598Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 1 2026-02-19T08:11:56.113652Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:56.114021Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-02-19T08:11:56.114082Z node 3 :TX_DATASHARD INFO: datashard.cpp:4137: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2026-02-19T08:11:56.114137Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4026: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2026-02-19T08:11:56.114351Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-02-19T08:11:56.114386Z node 3 :TX_DATASHARD INFO: datashard.cpp:4137: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2026-02-19T08:11:56.114415Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4026: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2026-02-19T08:11:56.114532Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 2500 next step 2501 2026-02-19T08:11:56.114613Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715661 2026-02-19T08:11:56.114711Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-02-19T08:11:56.114775Z node 3 :TX_DATASHARD INFO: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976715661 at 72075186224037889 2026-02-19T08:11:56.114838Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-02-19T08:11:56.114909Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 
72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-02-19T08:11:56.114993Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715661 2026-02-19T08:11:56.115049Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-02-19T08:11:56.115090Z node 3 :TX_DATASHARD INFO: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976715661 at 72075186224037890 2026-02-19T08:11:56.115128Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-02-19T08:11:56.115169Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037890 {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-02-19T08:11:56.115252Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2026-02-19T08:11:56.115392Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:30.257900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:30.392425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:30.411716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:30.411996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:30.412482Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:30.705639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:30.706039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:30.800112Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488685625896 != 1771488685625900 2026-02-19T08:11:30.809041Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:30.858819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:30.987964Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:31.315564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:31.338503Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:31.478029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:31.541378Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:914:2786] 2026-02-19T08:11:31.541667Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:31.627502Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:31.627624Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:31.629191Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:31.629280Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:31.629349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:31.636176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:31.636603Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:31.636685Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:948:2786] in generation 1 2026-02-19T08:11:31.637093Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:918:2789] 2026-02-19T08:11:31.637391Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 
2026-02-19T08:11:31.650916Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:921:2792] 2026-02-19T08:11:31.651188Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:31.660811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:31.661038Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:31.662590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:11:31.662686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:11:31.662818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:11:31.663123Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:31.663313Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:31.663382Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:962:2789] in generation 1 2026-02-19T08:11:31.663711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:31.663814Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:31.665090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-02-19T08:11:31.665151Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037890 2026-02-19T08:11:31.665208Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037890 2026-02-19T08:11:31.665537Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:31.665667Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:31.665758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037890 persisting started state actor id [1:963:2792] in generation 1 2026-02-19T08:11:31.678472Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:31.704753Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:31.704967Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:31.705077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:967:2818] 2026-02-19T08:11:31.705123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:31.705159Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:31.705205Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:31.705847Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:31.705903Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 
2026-02-19T08:11:31.705966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:31.706021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:968:2819] 2026-02-19T08:11:31.706043Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:11:31.706078Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:11:31.706107Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:11:31.706179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:31.706221Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-02-19T08:11:31.706272Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:31.706324Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037890, actorId: [1:969:2820] 2026-02-19T08:11:31.706343Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037890 2026-02-19T08:11:31.706364Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-02-19T08:11:31.706382Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:11:31.706529Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:31.706619Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:31.706926Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:31.706971Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:31.707011Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:31.707069Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:31.707322Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:904:2781], serverId# [1:915:2787], sessionId# [0:0:0] 2026-02-19T08:11:31.707366Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:11:31.707424Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:11:31.707456Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-02-19T08:11:31.707504Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-02-19T08:11:31.708016Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:31.708314Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186 ... Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-02-19T08:11:55.889859Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [3500 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1610:3243], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:11:55.889989Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 } 2026-02-19T08:11:55.890070Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-02-19T08:11:55.890338Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1610:3243] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037891, status# 2 2026-02-19T08:11:55.890659Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715666 2026-02-19T08:11:55.890771Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 3500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2026-02-19T08:11:55.890944Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2026-02-19T08:11:55.891285Z node 3 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 3 change records: to# [3:1416:3123], at tablet# 72075186224037891 2026-02-19T08:11:55.891343Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2026-02-19T08:11:55.891416Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 3500} 2026-02-19T08:11:55.891472Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-02-19T08:11:55.891516Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037893 2026-02-19T08:11:55.891887Z node 3 :TX_DATASHARD DEBUG: datashard_change_receiving.cpp:468: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2026-02-19T08:11:55.892177Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-02-19T08:11:55.892219Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 
2026-02-19T08:11:55.892259Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [3500:281474976715666] at 72075186224037893 for LoadAndWaitInRS 2026-02-19T08:11:55.892634Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:55.905386Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-02-19T08:11:55.905480Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [3500 : 281474976715666] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1610:3243], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:11:55.905585Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 3500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2026-02-19T08:11:55.905644Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-02-19T08:11:55.905820Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1610:3243] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037893, status# 2 2026-02-19T08:11:55.905868Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1610:3243] Reply: txId# 281474976715666, status# OK, error# 2026-02-19T08:11:55.906008Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715666 2026-02-19T08:11:55.906322Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037891 2026-02-19T08:11:55.906371Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 4, at: 72075186224037891 2026-02-19T08:11:55.906511Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1604:3238], serverId# [3:1605:3239], sessionId# [0:0:0] 2026-02-19T08:11:55.906659Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-02-19T08:11:55.906698Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:55.906730Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-02-19T08:11:55.906788Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-02-19T08:11:55.906882Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2026-02-19T08:11:55.906910Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1099: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2026-02-19T08:11:55.907025Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1099: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2026-02-19T08:11:55.907059Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1099: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2026-02-19T08:11:55.909500Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037893 
2026-02-19T08:11:55.909930Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037893 2026-02-19T08:11:55.910151Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-02-19T08:11:55.910209Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:55.910262Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715667] at 72075186224037893 for WaitForStreamClearance 2026-02-19T08:11:55.910497Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:55.910570Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-02-19T08:11:55.911185Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2026-02-19T08:11:55.911374Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2026-02-19T08:11:55.952281Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037893 2026-02-19T08:11:55.952363Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715667, at: 72075186224037893 2026-02-19T08:11:55.952591Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-02-19T08:11:55.952628Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:55.952667Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715667] at 72075186224037893 for ReadTableScan 2026-02-19T08:11:55.952797Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:55.952862Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-02-19T08:11:55.952916Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-02-19T08:11:55.955334Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2026-02-19T08:11:55.955638Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2026-02-19T08:11:55.955819Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-02-19T08:11:55.955857Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:55.955892Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715668] at 72075186224037892 for WaitForStreamClearance 2026-02-19T08:11:55.956072Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:55.956123Z node 3 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-02-19T08:11:55.956647Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2026-02-19T08:11:55.956768Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2026-02-19T08:11:55.958731Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037892 2026-02-19T08:11:55.958774Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715668, at: 72075186224037892 2026-02-19T08:11:55.958958Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-02-19T08:11:55.958990Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:11:55.959026Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715668] at 72075186224037892 for ReadTableScan 2026-02-19T08:11:55.959133Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:55.959186Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-02-19T08:11:55.959233Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [FAIL] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/fulltext/unittest >> KqpFulltextIndexes::SelectWithFulltextMatchAndEdgeNgramWildcard [GOOD] Test command err: 2026-02-19T08:11:23.690365Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485959233290885:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:23.690429Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:24.161195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:24.162506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:24.190686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:24.237874Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485959233290845:2081] 1771488683685952 != 1771488683685955 
2026-02-19T08:11:24.257057Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:24.265342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:24.421407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:24.428891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:24.428921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:24.428928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:24.428995Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:24.724202Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:24.930325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:25.041173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:25.342274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:25.637951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:25.806091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.259742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485980708129204:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.259842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.260280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485980708129214:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.260332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.690495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485959233290885:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:28.690558Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:29.054665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.122801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.191406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.277092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.346033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.436249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.531939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.605240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:29.778502Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485985003097390:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:29.778597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:29.778961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485985003097395:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:29.779006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485985003097396:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:29.779413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:29.784061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:29.809634Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608485985003097399:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:11:29.923514Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608485985003097450:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:32.026112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Key: 1 Key: 4 Key: 7 Key: 6 Ke ... on.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:48.459051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:48.668218Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:48.776288Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.181893Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486081769555955:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.182006Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.183168Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486081769555965:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.183250Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.276675Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.319884Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.378479Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.427981Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.466477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.504962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.550417Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.621735Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:51.724570Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486081769556830:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.724684Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.725011Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486081769556835:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.725112Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486081769556836:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.725239Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:51.728678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:51.739461Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486081769556839:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:11:51.826009Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486081769556890:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:52.032937Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486064589684939:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:52.033034Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:53.724534Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:54.069779Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:54.145185Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:11:56.035098Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2118: SelfId: [3:7608486103244394092:2607], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01khtf98zy356r0tk12wwpg0dq. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YWU0NzExM2UtNDk1MjI2MmUtODljYzI3NjAtMjRkNGVlYWI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: No search terms were extracted from the query } 2026-02-19T08:11:56.035841Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [3:7608486103244394092:2607], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01khtf98zy356r0tk12wwpg0dq. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YWU0NzExM2UtNDk1MjI2MmUtODljYzI3NjAtMjRkNGVlYWI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: BAD_REQUEST DEFAULT_ERROR: {
: Error: No search terms were extracted from the query }. 2026-02-19T08:11:56.036415Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [3:7608486103244394093:2608], TxId: 281474976710677, task: 2. Ctx: { TraceId : 01khtf98zy356r0tk12wwpg0dq. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=YWU0NzExM2UtNDk1MjI2MmUtODljYzI3NjAtMjRkNGVlYWI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7608486103244394087:2598], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2026-02-19T08:11:56.036953Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=YWU0NzExM2UtNDk1MjI2MmUtODljYzI3NjAtMjRkNGVlYWI=, ActorId: [3:7608486098949426771:2598], ActorState: ExecuteState, LegacyTraceId: 01khtf98zy356r0tk12wwpg0dq, Create QueryResponse for error on request, msg: status# BAD_REQUEST issues# { message: "No search terms were extracted from the query" severity: 1 } trace_id# >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> KqpFulltextIndexes::DeleteRowWithRelevance-Covered [GOOD] >> KqpFulltextIndexes::ExplainFulltextIndexContains >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |90.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:11:43.324721Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:43.468246Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:43.492378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:43.492467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:43.494682Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:43.888911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:43.889038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:44.045021Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488700410694 != 1771488700410698 2026-02-19T08:11:44.072955Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:44.138215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:44.232320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:44.570620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:44.584978Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:44.703466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:44.788396Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:914:2786] 2026-02-19T08:11:44.788723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:44.843127Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:44.843285Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:44.845208Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:44.845304Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:44.845392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:44.845885Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:44.846225Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:44.846318Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:948:2786] in generation 1 2026-02-19T08:11:44.846814Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:918:2789] 2026-02-19T08:11:44.847027Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 
2026-02-19T08:11:44.857945Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:921:2792] 2026-02-19T08:11:44.858188Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:44.867709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:44.867955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:44.869546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:11:44.869697Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:11:44.869855Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:11:44.870229Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:44.870398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:44.870487Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:962:2789] in generation 1 2026-02-19T08:11:44.870848Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:44.870966Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:44.872366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-02-19T08:11:44.872436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037890 2026-02-19T08:11:44.872478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037890 2026-02-19T08:11:44.872842Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:44.872958Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:44.873051Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037890 persisting started state actor id [1:963:2792] in generation 1 2026-02-19T08:11:44.886981Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:44.930800Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:44.931113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:44.931269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:967:2818] 2026-02-19T08:11:44.931332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:44.931394Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:44.931434Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:44.932147Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:44.932200Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 
2026-02-19T08:11:44.932283Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:44.932367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:968:2819] 2026-02-19T08:11:44.932405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:11:44.932432Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:11:44.932473Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:11:44.932562Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:44.932595Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-02-19T08:11:44.932645Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:44.932698Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037890, actorId: [1:969:2820] 2026-02-19T08:11:44.932722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037890 2026-02-19T08:11:44.932743Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-02-19T08:11:44.932764Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:11:44.932950Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:44.933063Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:44.933433Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:44.933491Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:44.933542Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:44.933603Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:44.934081Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:904:2781], serverId# [1:915:2787], sessionId# [0:0:0] 2026-02-19T08:11:44.934137Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:11:44.934214Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:11:44.934251Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-02-19T08:11:44.934334Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-02-19T08:11:44.934913Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:44.935248Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186 ... node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:11:58.943576Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2026-02-19T08:11:58.989173Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:11:58.989224Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037889, table# 8, finished edge# 0, front# 0 2026-02-19T08:11:58.990262Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:11:58.990302Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2026-02-19T08:11:58.990700Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:263: 72075186224037889 snapshot complete for split OpId 281474976715663 2026-02-19T08:11:58.990918Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715663 2026-02-19T08:11:58.990971Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715663 2026-02-19T08:11:58.991002Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715663 2026-02-19T08:11:58.991031Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715663 2026-02-19T08:11:58.991254Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715663 2026-02-19T08:11:58.991461Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715663 2026-02-19T08:11:58.991494Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715663 2026-02-19T08:11:58.991524Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715663 2026-02-19T08:11:58.991557Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715663 2026-02-19T08:11:58.991654Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:339: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715663 
2026-02-19T08:11:58.992162Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:431: 72075186224037889 Sending snapshots from src for split OpId 281474976715663 2026-02-19T08:11:58.992341Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2378: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2026-02-19T08:11:58.992443Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2378: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2026-02-19T08:11:58.992695Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037891, clientId# [3:1389:3102], serverId# [3:1390:3103], sessionId# [0:0:0] 2026-02-19T08:11:58.992730Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037892, clientId# [3:1388:3101], serverId# [3:1391:3104], sessionId# [0:0:0] 2026-02-19T08:11:58.992856Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037891 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2026-02-19T08:11:58.993423Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037892 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2026-02-19T08:11:58.994901Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037891 ack snapshot OpId 281474976715663 2026-02-19T08:11:58.995080Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-02-19T08:11:58.995169Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:58.995256Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037891 2026-02-19T08:11:58.995316Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037891, actorId: [3:1394:3107] 2026-02-19T08:11:58.995346Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037891 2026-02-19T08:11:58.995388Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037891 2026-02-19T08:11:58.995422Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-02-19T08:11:58.995631Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:468: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715663 2026-02-19T08:11:58.996145Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-02-19T08:11:58.996180Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:58.996206Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-02-19T08:11:58.996232Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-02-19T08:11:58.996352Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2026-02-19T08:11:58.996385Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037891 2026-02-19T08:11:58.996669Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1389:3102], serverId# [3:1390:3103], sessionId# [0:0:0] 2026-02-19T08:11:58.996806Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037892 ack snapshot OpId 281474976715663 2026-02-19T08:11:58.996895Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2026-02-19T08:11:58.996958Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:58.997029Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037892 2026-02-19T08:11:58.997071Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037892, actorId: [3:1396:3109] 2026-02-19T08:11:58.997092Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037892 2026-02-19T08:11:58.997120Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037892 2026-02-19T08:11:58.997142Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-02-19T08:11:58.997244Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:468: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715663 2026-02-19T08:11:58.997873Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 2500 next step 3000 2026-02-19T08:11:58.997931Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2830: CheckMediatorStateRestored at 72075186224037891: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-02-19T08:11:58.997998Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 3000 2026-02-19T08:11:58.998024Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-02-19T08:11:58.998147Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-02-19T08:11:58.998175Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:58.998202Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-02-19T08:11:58.998252Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-02-19T08:11:58.998372Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1388:3101], serverId# [3:1391:3104], sessionId# [0:0:0] 2026-02-19T08:11:58.998594Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 2500 next step 3000 2026-02-19T08:11:58.998629Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2830: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-02-19T08:11:59.021018Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:492: 
72075186224037889 ack split to schemeshard 281474976715663 2026-02-19T08:11:59.027943Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:572: Got TEvSplitPartitioningChanged: opId: 281474976715663, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2026-02-19T08:11:59.030646Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037889 2026-02-19T08:11:59.030710Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-02-19T08:11:59.031049Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1282:3022], serverId# [3:1283:3023], sessionId# [0:0:0] 2026-02-19T08:11:59.031142Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:11:59.031184Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037889 state 5 2026-02-19T08:11:59.031403Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:539: 72075186224037889 ack split partitioning changed to schemeshard 281474976715663 2026-02-19T08:11:59.031467Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:11:59.031516Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 >> ReadOnlyVDisk::TestReads [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink >> TNebiusAccessServiceTest::Authenticate [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 4248065877345341758 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === 
Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> 
TTabletPipeTest::TestTwoNodesAndRebootOfConsumer |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous >> TNebiusAccessServiceTest::PassRequestId ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2026-02-19T08:12:01.796764Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c95a03e2ed0] Connect to grpc://localhost:18685 2026-02-19T08:12:01.799394Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c95a03e2ed0] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2026-02-19T08:12:01.807628Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c95a03e2ed0] Status 7 Permission Denied 2026-02-19T08:12:01.807994Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c95a03e2ed0] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2026-02-19T08:12:01.810751Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c95a03e2ed0] Response AuthenticateResponse { account { user_account { id: "1234" } } } >> TNebiusAccessServiceTest::PassRequestId [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |90.1%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin >> TNebiusAccessServiceTest::Authorize >> TNebiusAccessServiceTest::Authorize [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2026-02-19T08:12:02.855041Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c81647e3cd0]{reqId} Connect to grpc://localhost:7329 2026-02-19T08:12:02.863882Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c81647e3cd0]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2026-02-19T08:12:02.881939Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c81647e3cd0]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } |90.1%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [3:166:2058] recipient: [3:164:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [3:166:2058] recipient: [3:164:2140] Leader for TabletID 9437184 is [3:172:2144] sender: [3:173:2058] recipient: [3:164:2140] Leader for TabletID 9437185 is [0:0:0] sender: [4:176:2049] recipient: [4:167:2097] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [4:176:2049] recipient: [4:167:2097] Leader for TabletID 9437185 is [4:188:2100] sender: [4:190:2049] recipient: [4:167:2097] Leader for TabletID 9437184 is [3:172:2144] sender: [3:216:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [3:218:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [4:220:2049] recipient: [4:45:2053] Leader for TabletID 9437185 is [4:188:2100] sender: [3:223:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [4:221:2049] recipient: [4:161:2096] Leader for TabletID 9437185 is [4:188:2100] sender: [4:226:2049] recipient: [4:225:2113] Leader for TabletID 9437185 is [4:227:2114] sender: [4:228:2049] recipient: [4:225:2113] Leader for TabletID 9437185 is [4:227:2114] sender: [3:258:2058] recipient: [3:15:2062] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2026-02-19T08:12:03.612623Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cde4f0e4750] Connect to grpc://localhost:18128 2026-02-19T08:12:03.693857Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde4f0e4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2026-02-19T08:12:03.708460Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cde4f0e4750] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2026-02-19T08:12:03.710139Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde4f0e4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2026-02-19T08:12:03.712418Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde4f0e4750] Status 7 Permission Denied 2026-02-19T08:12:03.714115Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde4f0e4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2026-02-19T08:12:03.716049Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde4f0e4750] Status 7 Permission Denied 2026-02-19T08:12:03.717139Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cde4f0e4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2026-02-19T08:12:03.718862Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cde4f0e4750] Status 7 
Permission Denied >> ReadOnlyVDisk::TestGarbageCollect [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> KqpImmediateEffects::UpsertAfterInsert >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteOn-Covered [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteOnReturning+Covered >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> KqpInplaceUpdate::SingleRowArithm-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 4728672606395482978 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-02-19T08:11:54.967726Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2026-02-19T08:11:54.974087Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-02-19T08:11:55.990446Z 1 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:55.991490Z 2 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2026-02-19T08:11:56.461238Z 1 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:56.461436Z 2 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting 
VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-02-19T08:11:56.847144Z 1 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:56.848379Z 2 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:56.849516Z 3 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:56.849879Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [83071f3b2db5c7aa] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-02-19T08:11:56.851879Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.616 QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 Subrequests# [{ BlobId# [1:1:4:0:0:131072:6] }] } TEvVPut{ TimestampMs# 0.616 QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 Subrequests# [{ BlobId# [1:1:4:0:0:131072:5] }] } TEvVPut{ TimestampMs# 0.616 QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 Subrequests# [{ BlobId# [1:1:4:0:0:131072:4] }] } TEvVPut{ TimestampMs# 0.617 QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 Subrequests# [{ BlobId# [1:1:4:0:0:131072:3] }] } TEvVPut{ TimestampMs# 0.618 QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 Subrequests# [{ BlobId# [1:1:4:0:0:131072:2] }] } TEvVPut{ TimestampMs# 0.618 QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 Subrequests# [{ BlobId# [1:1:4:0:0:131072:1] }] } TEvVPutResult{ TimestampMs# 1.29 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.35 QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 Subrequests# [{ BlobId# [1:1:4:0:0:131072:6] }] } TEvVPutResult{ TimestampMs# 2.771 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 2.828 QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 Subrequests# [{ BlobId# [1:1:4:0:0:131072:6] }] } TEvVPutResult{ TimestampMs# 3.161 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 3.183 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 3.2 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 3.222 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 3.257 VDiskId# [82000000:1:0:3:0] 
NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 3.407 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2026-02-19T08:11:57.483825Z 1 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:57.484082Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:57.484148Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2026-02-19T08:11:58.498273Z 1 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:58.498484Z 2 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:58.498550Z 3 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:58.498605Z 4 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5353:726] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2026-02-19T08:11:58.828627Z 1 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:58.828843Z 2 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:58.828905Z 3 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:58.828962Z 4 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5353:726] 2026-02-19T08:11:58.829013Z 5 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5360:733] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2026-02-19T08:11:59.115913Z 1 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:59.116118Z 2 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:59.116175Z 3 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:59.116225Z 4 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5353:726] 2026-02-19T08:11:59.116276Z 5 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5360:733] 2026-02-19T08:11:59.116325Z 6 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5367:740] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2026-02-19T08:11:59.386912Z 1 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5332:705] 2026-02-19T08:11:59.387141Z 2 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:59.387214Z 3 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:59.387274Z 4 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5353:726] 2026-02-19T08:11:59.387336Z 5 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5360:733] 2026-02-19T08:11:59.387396Z 6 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5367:740] 2026-02-19T08:11:59.387455Z 7 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5374:747] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2026-02-19T08:11:59.690563Z 2 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5339:712] 2026-02-19T08:11:59.690677Z 3 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:11:59.690739Z 4 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5353:726] 2026-02-19T08:11:59.690796Z 5 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5360:733] 2026-02-19T08:11:59.690856Z 6 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5367:740] 2026-02-19T08:11:59.690917Z 7 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5374:747] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2026-02-19T08:12:00.036764Z 3 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 
VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5346:719] 2026-02-19T08:12:00.036843Z 4 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5353:726] 2026-02-19T08:12:00.036887Z 5 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5360:733] 2026-02-19T08:12:00.036939Z 6 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5367:740] 2026-02-19T08:12:00.036993Z 7 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5374:747] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2026-02-19T08:12:00.502036Z 4 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5353:726] 2026-02-19T08:12:00.502161Z 5 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5360:733] 2026-02-19T08:12:00.502228Z 6 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5367:740] 2026-02-19T08:12:00.502291Z 7 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5374:747] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2026-02-19T08:12:00.923038Z 5 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5360:733] 2026-02-19T08:12:00.923107Z 6 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5367:740] 2026-02-19T08:12:00.923145Z 7 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5374:747] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2026-02-19T08:12:02.147335Z 6 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5367:740] 2026-02-19T08:12:02.147443Z 7 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5374:747] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2026-02-19T08:12:02.696725Z 7 00h14m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5374:747] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult 
{Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin >> KqpImmediateEffects::Upsert >> TBlobStorageProxyTest::TestPersistence >> TBlobStorageProxyTest::TestGetMultipart >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdateNoChange+Nullable+Covered [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdateFilterClusterChangeReturning-Covered >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpsertClusterChange+Covered [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdatePkClusterChangeReturning-Covered-Overlap >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdatePkClusterChange-Covered-Overlap [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdatePkClusterChangeReturning+Covered+Overlap >> KqpPrefixedVectorIndexes::PrefixedVectorIndexInsert+Returning-Covered [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexInsert-Returning+Covered >> KqpPrefixedVectorIndexes::PrefixedVectorIndexInsertWithOverlap-Returning-Covered [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexTruncateTable+Covered+Overlap >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> test_restarts.py::test_basic [FAIL] >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict+UseSink |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] >> KqpFulltextIndexes::ExplainFulltextIndexContains [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:11:18.644266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2026-02-19T08:11:18.644344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:11:18.644397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:11:18.644426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:11:18.644470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:11:18.644526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:11:18.644551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:11:18.644574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:11:18.644640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:11:18.644691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:11:18.645388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:11:18.645652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:11:18.718910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:11:18.718964Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:18.732995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:11:18.733696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:11:18.733908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:11:18.743540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:11:18.744080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:11:18.744561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:11:18.744781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:11:18.747939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:18.748167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:11:18.749423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:11:18.749487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:11:18.749624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:11:18.749690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:11:18.749770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:11:18.749978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:11:18.904262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.905219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.905334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.905421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.905499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:11:18.905591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.905692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.905785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.905865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.905954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.906026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.906110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.906179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:11:18.906281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... HARD INFO: schemeshard__operation_backup_restore_common.h:489: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-02-19T08:12:06.539847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> ProposedWaitParts 2026-02-19T08:12:06.540033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:12:06.556138Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:812: [Export] [s3] Bootstrap: self# [1:3860:5820], attempt# 0 2026-02-19T08:12:06.639536Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3860:5820], sender# [1:3859:5819] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:17572 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 91682783-7EC9-4E11-9103-7EDB49979FEC amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-02-19T08:12:06.658179Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3860:5820], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-02-19T08:12:06.662494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:12:06.662574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:12:06.662861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:06.662909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:12:06.663762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:12:06.663829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:328: TBackup 
TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:12:06.665373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:12:06.665486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:12:06.665524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:12:06.665573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:12:06.665617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:17572 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 10D81FB3-9554-4D23-B143-FFB759BE6A0A amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-02-19T08:12:06.668645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:12:06.669065Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3860:5820], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-02-19T08:12:06.669354Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3859:5819] 2026-02-19T08:12:06.669949Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3860:5820], sender# [1:3859:5819], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-02-19T08:12:06.675314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:17572 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: BDBD514A-81EB-4BB6-B033-66431A84D10F amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2026-02-19T08:12:06.678550Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3860:5820], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 
2026-02-19T08:12:06.678618Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3860:5820], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-02-19T08:12:06.679029Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3859:5819], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-02-19T08:12:06.718288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-02-19T08:12:06.718369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:12:06.718547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-02-19T08:12:06.718658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:303: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-02-19T08:12:06.718728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:12:06.718779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:12:06.718826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:12:06.718884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:12:06.719037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:12:06.727846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:12:06.728538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:12:06.728594Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:12:06.728704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:12:06.728744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:12:06.728787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:12:06.728821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:12:06.728882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:12:06.728956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2713] message: TxId: 102 2026-02-19T08:12:06.729020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:12:06.729062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:12:06.729110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:12:06.729236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:12:06.740736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:12:06.740832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3843:5804] TestWaitNotification: OK eventTxId 102 >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/fulltext/unittest >> KqpFulltextIndexes::ExplainFulltextIndexContains [GOOD] Test command err: 2026-02-19T08:11:23.662069Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485962714360446:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:23.662162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:23.953518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:23.981067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:23.981184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:24.062334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:24.064012Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:24.065794Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485962714360397:2081] 1771488683657796 != 1771488683657799 2026-02-19T08:11:24.224921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:24.289299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:24.289319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:24.289325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:24.289418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:24.672331Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:24.758875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:24.867224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:25.064278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:25.381956Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:25.511971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:27.794344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485979894231454:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:27.794743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:27.800793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485979894231464:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:27.800946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.166414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.223521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.266254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.363927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.425151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.516218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.614183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.690357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608485962714360446:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:28.690613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:11:28.729697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:28.933684Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485984189199634:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.933779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.934232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485984189199639:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.934269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608485984189199640:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.934374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:28.939010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:28.953906Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608485984189199643:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:11:29.034838Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608485988484166990:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:31.631540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:31.949592Z no ... ript_executions 2026-02-19T08:12:00.837634Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:00.855509Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.945371Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:00.951676Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.157337Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.246230Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.536048Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608486138235310286:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:04.536161Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:04.536583Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608486138235310296:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:04.536642Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:04.645120Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.687937Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.728825Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.765313Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.808890Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.852640Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.901240Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.934310Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608486116760471957:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:04.934372Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:04.971363Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.068120Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608486142530278467:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.068261Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.068385Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608486142530278472:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.068505Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608486142530278474:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.068577Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.074377Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:05.086707Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608486142530278476:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:05.185059Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608486142530278528:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:07.329166Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.604468Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.677395Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Operators":[{"SearchTerms":"cats","Query":"\"cats\"","Name":"ReadFullTextIndex","Inputs":[],"Index":"fulltext_idx","Columns":"Key, Text","Table":"\/Root\/Texts","ItemsLimit":"\"1001\"","QueryColumns":"Text"}],"Node Type":"ReadFullTextIndex"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"SearchTerms":"cats","Query":"\"cats\"","Name":"ReadFullTextIndex","Index":"fulltext_idx","Columns":"Key, Text","Table":"\/Root\/Texts","ItemsLimit":"\"1001\"","QueryColumns":"Text"}],"Node Type":"ReadFullTextIndex"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> ReadOnlyVDisk::TestSync [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> 
test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [FAIL] >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |90.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2026-02-19T08:12:11.663917Z node 1 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:11.670855Z node 1 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2026-02-19T08:12:11.670909Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 2 is not local. 2026-02-19T08:12:11.671019Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 3 2026-02-19T08:12:11.672145Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2026-02-19T08:12:11.672213Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 1 is not local. 2026-02-19T08:12:11.672303Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-02-19T08:12:11.672333Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:11.672393Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2026-02-19T08:12:11.672411Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 4 is not local. 2026-02-19T08:12:11.672472Z node 1 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2026-02-19T08:12:11.672493Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 5 is not local. 2026-02-19T08:12:11.672558Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 6 2026-02-19T08:12:11.672617Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2026-02-19T08:12:11.672635Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 7 is not local. 2026-02-19T08:12:11.672660Z node 1 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2026-02-19T08:12:11.672676Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 8 is not local. 
2026-02-19T08:12:11.672696Z node 1 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:11.672797Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2026-02-19T08:12:11.672827Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 7683254948007689230 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2026-02-19T08:11:52.101436Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8837:948] 2026-02-19T08:11:52.101929Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8844:955] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-02-19T08:11:54.432500Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8851:962] 2026-02-19T08:11:54.432655Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8844:955] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-02-19T08:11:59.439461Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8865:976] 2026-02-19T08:11:59.439546Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only 
Sender# [1:8858:969] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-02-19T08:12:02.068330Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8872:983] 2026-02-19T08:12:02.068438Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8865:976] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2026-02-19T08:12:04.788871Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8879:990] 2026-02-19T08:12:04.788983Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8872:983] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2026-02-19T08:12:08.584633Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8879:990] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK 
ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:11:47.374341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:47.509835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:11:47.527971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:11:47.528056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:11:47.528496Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:11:47.925938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:47.926059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:48.051746Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488703833148 != 1771488703833152 2026-02-19T08:11:48.077665Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:48.151098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:48.245075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:11:48.567430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:48.590304Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:48.701064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:48.763374Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:914:2786] 2026-02-19T08:11:48.763699Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:48.817227Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:48.817342Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:48.819553Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:48.819639Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:48.819717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:48.820116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:48.820402Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:48.820480Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:948:2786] in generation 1 2026-02-19T08:11:48.820864Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:918:2789] 2026-02-19T08:11:48.821056Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 
2026-02-19T08:11:48.838391Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:921:2792] 2026-02-19T08:11:48.838629Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:48.855427Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:48.855639Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:48.857078Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:11:48.857197Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:11:48.857258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:11:48.857548Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:48.862080Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:48.862174Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:962:2789] in generation 1 2026-02-19T08:11:48.862586Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:48.862714Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:48.864081Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-02-19T08:11:48.864146Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037890 2026-02-19T08:11:48.864189Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037890 2026-02-19T08:11:48.864529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:48.864653Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:48.864710Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037890 persisting started state actor id [1:963:2792] in generation 1 2026-02-19T08:11:48.876848Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:48.951844Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:48.952076Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:48.952200Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:967:2818] 2026-02-19T08:11:48.952258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:48.952294Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:48.952331Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:48.952916Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:48.952977Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 
2026-02-19T08:11:48.953074Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:48.953133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:968:2819] 2026-02-19T08:11:48.953158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:11:48.953193Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:11:48.953221Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:11:48.953305Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:48.953335Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-02-19T08:11:48.953390Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:48.953441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037890, actorId: [1:969:2820] 2026-02-19T08:11:48.953462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037890 2026-02-19T08:11:48.953482Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-02-19T08:11:48.953503Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:11:48.953690Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:48.953954Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:48.954300Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:48.954347Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:48.954388Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:48.954450Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:48.954669Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:904:2781], serverId# [1:915:2787], sessionId# [0:0:0] 2026-02-19T08:11:48.954722Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:11:48.954783Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:11:48.954835Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-02-19T08:11:48.954926Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2026-02-19T08:11:48.955473Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:48.955786Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186 ... 037888 2026-02-19T08:12:10.350839Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2026-02-19T08:12:10.350899Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1287:3026] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 2 2026-02-19T08:12:10.351000Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:12:10.351040Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [3000 : 281474976715662] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1287:3026], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:12:10.351087Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 3000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2026-02-19T08:12:10.351114Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:12:10.351187Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2026-02-19T08:12:10.351222Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1287:3026] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 2 2026-02-19T08:12:10.351265Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1287:3026] Reply: txId# 281474976715662, status# OK, error# 2026-02-19T08:12:10.351709Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037889 2026-02-19T08:12:10.351762Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4508: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-02-19T08:12:10.351933Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1282:3022], serverId# [3:1283:3023], sessionId# [0:0:0] 2026-02-19T08:12:10.352017Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:12:10.352054Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:10.352103Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:12:10.352169Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:12:10.353375Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:12:10.353992Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:12:10.354258Z node 3 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:12:10.354328Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.354381Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037889 for WaitForStreamClearance 2026-02-19T08:12:10.354643Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.354730Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:12:10.355424Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 1 2026-02-19T08:12:10.355712Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037889, TxId: 281474976715664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:12:10.355934Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715664, PendingAcks: 0 2026-02-19T08:12:10.356006Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 0 2026-02-19T08:12:10.364644Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037889 2026-02-19T08:12:10.364717Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715664, at: 72075186224037889 2026-02-19T08:12:10.364943Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:12:10.364979Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.365014Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037889 for ReadTableScan 2026-02-19T08:12:10.365132Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:10.365192Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:12:10.365249Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:12:10.424556Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:12:10.424978Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:12:10.433747Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:12:10.433864Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.433939Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2026-02-19T08:12:10.434278Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.434384Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:12:10.435216Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2026-02-19T08:12:10.435504Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:12:10.435672Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2026-02-19T08:12:10.435735Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2026-02-19T08:12:10.437788Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:12:10.437844Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715665, at: 72075186224037888 2026-02-19T08:12:10.438086Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:12:10.438133Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.438172Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2026-02-19T08:12:10.438321Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:10.438389Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:12:10.438431Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:12:10.482998Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2026-02-19T08:12:10.483406Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-02-19T08:12:10.483616Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-02-19T08:12:10.483672Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.483724Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715666] at 72075186224037890 for WaitForStreamClearance 2026-02-19T08:12:10.483984Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.484058Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:12:10.484769Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2026-02-19T08:12:10.485008Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 
72075186224037890, TxId: 281474976715666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:12:10.485133Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2026-02-19T08:12:10.485188Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2026-02-19T08:12:10.490023Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037890 2026-02-19T08:12:10.490092Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715666, at: 72075186224037890 2026-02-19T08:12:10.490262Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-02-19T08:12:10.490296Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:12:10.490333Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715666] at 72075186224037890 for ReadTableScan 2026-02-19T08:12:10.490462Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:10.490521Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:12:10.490567Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpdateOn >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> AggregateStatistics::ShouldBePings >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] >> 
AggregateStatistics::ShouldBePings [GOOD] |90.2%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteOnReturning+Covered [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2026-02-19T08:12:14.456672Z node 1 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:14.466710Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-02-19T08:12:14.467209Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:14.467382Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 1 2026-02-19T08:12:14.467596Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:14.467835Z node 3 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:14.467990Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-02-19T08:12:14.468016Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:14.468080Z node 3 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 3, status = OK 2026-02-19T08:12:14.468147Z node 3 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:14.468264Z node 4 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-02-19T08:12:14.468345Z node 3 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 3 2026-02-19T08:12:14.468394Z node 3 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:14.468536Z node 3 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-02-19T08:12:14.468556Z node 3 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:14.468597Z node 4 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2026-02-19T08:12:14.468637Z node 4 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:14.468725Z node 1 :STATISTICS DEBUG: service_impl.cpp:447: Received 
TEvAggregateStatisticsResponse SenderNodeId: 3 2026-02-19T08:12:14.468778Z node 4 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 4 2026-02-19T08:12:14.468803Z node 4 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:12:14.468871Z node 4 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-02-19T08:12:14.468896Z node 4 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:14.468973Z node 2 :STATISTICS DEBUG: service_impl.cpp:447: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-02-19T08:12:14.480636Z node 4 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.480718Z node 4 :STATISTICS DEBUG: service_impl.cpp:378: Skip TEvDispatchKeepAlive 2026-02-19T08:12:14.480755Z node 3 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.480778Z node 3 :STATISTICS DEBUG: service_impl.cpp:378: Skip TEvDispatchKeepAlive 2026-02-19T08:12:14.498589Z node 2 :STATISTICS DEBUG: service_impl.cpp:400: Skip TEvKeepAliveTimeout 2026-02-19T08:12:14.498678Z node 1 :STATISTICS INFO: service_impl.cpp:415: Node 2 is unavailable 2026-02-19T08:12:14.498713Z node 1 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:14.498820Z node 1 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.498862Z node 1 :STATISTICS DEBUG: service_impl.cpp:392: Skip TEvKeepAliveTimeout 2026-02-19T08:12:14.498944Z node 1 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.498967Z node 1 :STATISTICS DEBUG: service_impl.cpp:378: Skip TEvDispatchKeepAlive 2026-02-19T08:12:14.499083Z node 1 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.499116Z node 1 :STATISTICS DEBUG: service_impl.cpp:427: Skip TEvAggregateKeepAlive |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2026-02-19T08:12:14.846038Z node 1 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:14.855462Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-02-19T08:12:14.855883Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:14.856050Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 1 2026-02-19T08:12:14.856249Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:14.856483Z node 3 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 
2026-02-19T08:12:14.856616Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-02-19T08:12:14.856641Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:14.856699Z node 3 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 3, status = OK 2026-02-19T08:12:14.856750Z node 3 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:14.856871Z node 4 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-02-19T08:12:14.856973Z node 3 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 3 2026-02-19T08:12:14.857024Z node 3 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:14.857165Z node 3 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-02-19T08:12:14.857186Z node 3 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:14.857231Z node 4 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2026-02-19T08:12:14.857274Z node 4 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:14.857361Z node 1 :STATISTICS DEBUG: service_impl.cpp:447: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-02-19T08:12:14.857421Z node 4 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 4 2026-02-19T08:12:14.857449Z node 4 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:12:14.857513Z node 4 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-02-19T08:12:14.857548Z node 4 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:14.857632Z node 2 :STATISTICS DEBUG: service_impl.cpp:447: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-02-19T08:12:14.868008Z node 4 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.868087Z node 4 :STATISTICS DEBUG: service_impl.cpp:378: Skip TEvDispatchKeepAlive 2026-02-19T08:12:14.868146Z node 3 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.868172Z node 3 :STATISTICS DEBUG: service_impl.cpp:378: Skip TEvDispatchKeepAlive 2026-02-19T08:12:14.881916Z node 2 :STATISTICS DEBUG: service_impl.cpp:400: Skip TEvKeepAliveTimeout 2026-02-19T08:12:14.882010Z node 1 :STATISTICS INFO: service_impl.cpp:415: Node 2 is unavailable 2026-02-19T08:12:14.882046Z node 1 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:14.882149Z node 1 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.882209Z node 1 :STATISTICS DEBUG: service_impl.cpp:392: Skip TEvKeepAliveTimeout 2026-02-19T08:12:14.882276Z node 1 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 
0 2026-02-19T08:12:14.882300Z node 1 :STATISTICS DEBUG: service_impl.cpp:378: Skip TEvDispatchKeepAlive 2026-02-19T08:12:14.882415Z node 1 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:14.882455Z node 1 :STATISTICS DEBUG: service_impl.cpp:427: Skip TEvAggregateKeepAlive |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2026-02-19T08:12:15.014804Z node 1 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:15.024023Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2026-02-19T08:12:15.024394Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:15.024498Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 1 2026-02-19T08:12:15.024674Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2026-02-19T08:12:15.024730Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:15.024784Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2026-02-19T08:12:15.024816Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:15.024859Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2026-02-19T08:12:15.024884Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:15.024992Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2026-02-19T08:12:15.025022Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:15.025080Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2026-02-19T08:12:15.025116Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:15.025169Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 3 2026-02-19T08:12:15.025347Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2026-02-19T08:12:15.025384Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:15.025425Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 5 
2026-02-19T08:12:15.025476Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-02-19T08:12:15.025493Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:15.025521Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2026-02-19T08:12:15.025551Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:15.025608Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2026-02-19T08:12:15.025626Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:15.025646Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 7 2026-02-19T08:12:15.025699Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2026-02-19T08:12:15.025762Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:15.036987Z node 1 :STATISTICS DEBUG: service_impl.cpp:965: Tablet 1 has already been processed 2026-02-19T08:12:15.037072Z node 1 :STATISTICS ERROR: service_impl.cpp:969: No result was received from the tablet 2 2026-02-19T08:12:15.037105Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 2 is not local. 2026-02-19T08:12:15.037207Z node 1 :STATISTICS DEBUG: service_impl.cpp:965: Tablet 3 has already been processed 2026-02-19T08:12:15.037253Z node 1 :STATISTICS ERROR: service_impl.cpp:969: No result was received from the tablet 4 2026-02-19T08:12:15.037272Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 4 is not local. 2026-02-19T08:12:15.037311Z node 1 :STATISTICS DEBUG: service_impl.cpp:965: Tablet 5 has already been processed 2026-02-19T08:12:15.037334Z node 1 :STATISTICS ERROR: service_impl.cpp:969: No result was received from the tablet 6 2026-02-19T08:12:15.037354Z node 1 :STATISTICS DEBUG: service_impl.cpp:1000: Tablet 6 is not local. 
2026-02-19T08:12:15.037386Z node 1 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:15.037481Z node 1 :STATISTICS DEBUG: service_impl.cpp:251: Event round 1 is different from the current 0 2026-02-19T08:12:15.037511Z node 1 :STATISTICS DEBUG: service_impl.cpp:958: Skip TEvStatisticsRequestTimeout 2026-02-19T08:12:15.037573Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2026-02-19T08:12:15.037598Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:15.037662Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2026-02-19T08:12:15.037684Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:15.037754Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2026-02-19T08:12:15.037775Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2026-02-19T08:12:14.880173Z node 1 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:14.888790Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:15.005819Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 1 2026-02-19T08:12:15.005960Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 2 2026-02-19T08:12:15.006002Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:15.006775Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2026-02-19T08:12:15.006834Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:15.006932Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2026-02-19T08:12:15.006955Z node 2 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:15.007022Z node 1 :STATISTICS DEBUG: service_impl.cpp:447: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2026-02-19T08:12:15.007067Z node 1 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 >> TBlobStorageProxyTest::TestBlockPersistence >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> TBlobStorageProxyTest::TestBlock >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> 
AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2026-02-19T08:12:16.257824Z node 1 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:16.266758Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-02-19T08:12:16.267193Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:16.267513Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 1 2026-02-19T08:12:16.267793Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:16.267946Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2026-02-19T08:12:16.268017Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:16.268396Z node 3 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-02-19T08:12:16.268570Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2026-02-19T08:12:16.268625Z node 1 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:16.268848Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [2:44:2057], tablet id = 4, status = OK 2026-02-19T08:12:16.268909Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:44:2057], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:16.268980Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-02-19T08:12:16.269006Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:16.269066Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 2 2026-02-19T08:12:16.269259Z node 4 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-02-19T08:12:16.269355Z node 3 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 3, client id = [3:46:2057], server id = [3:46:2057], tablet id = 5, status = OK 2026-02-19T08:12:16.269405Z node 3 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [3:46:2057], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:16.269479Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 4 2026-02-19T08:12:16.269595Z node 1 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 3 2026-02-19T08:12:16.269685Z node 3 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 5 2026-02-19T08:12:16.269786Z node 3 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:16.269917Z node 4 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 4, client id = 
[4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2026-02-19T08:12:16.269970Z node 4 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2026-02-19T08:12:16.270090Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-02-19T08:12:16.270115Z node 2 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:16.270162Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2026-02-19T08:12:16.270185Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:16.270224Z node 3 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 3, client id = [3:46:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2026-02-19T08:12:16.270248Z node 3 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:16.270301Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2026-02-19T08:12:16.270330Z node 1 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:16.270395Z node 4 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 6 2026-02-19T08:12:16.270451Z node 4 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:12:16.270577Z node 4 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2026-02-19T08:12:16.270611Z node 4 :STATISTICS DEBUG: service_impl.cpp:1047: Skip EvClientConnected 2026-02-19T08:12:16.270739Z node 1 :STATISTICS DEBUG: service_impl.cpp:447: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-02-19T08:12:16.270893Z node 2 :STATISTICS DEBUG: service_impl.cpp:447: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-02-19T08:12:16.270951Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 2026-02-19T08:12:16.271153Z node 1 :STATISTICS DEBUG: service_impl.cpp:447: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2026-02-19T08:12:16.271205Z node 1 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 1 >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteOnReturning+Covered [GOOD] Test command err: 2026-02-19T08:11:56.592522Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486104549690853:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:56.592584Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:56.939270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:56.945936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:56.946026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:57.029414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:57.061481Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:57.062755Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486104549690822:2081] 1771488716588421 != 1771488716588424 2026-02-19T08:11:57.140557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:57.140581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:57.140588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:57.140671Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:57.230335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:57.613802Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:57.646568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:57.724892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:57.910075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.095016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.203938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.182786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486121729561905:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.182901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.183238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486121729561915:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.183272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.515532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.554038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.595581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.643705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.687945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.771757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.806843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.852550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.939005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486121729562788:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.939091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.939231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486121729562793:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.939347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486121729562795:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.939426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.943640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:00.964071Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486121729562797:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:01.066620Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486126024530144:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:01.594984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486104549690853:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:01.595055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:02.835320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:03.219444Z no ... adShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 6049, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715769 2026-02-19T08:12:13.077932Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:12:13.078007Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486172619102297:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715769, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 6049, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:13.078133Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976710675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976715757 } Internal: true } TxId: 281474976715769 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:12:13.079111Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxReply : TEvModifySchemeTransactionResult, id# 281474976710675, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715769 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:12:13.079230Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486172619102297:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715769, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 6049, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715769 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:12:13.080336Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:12:13.080413Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486172619102297:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 6049, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:13.083289Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976710675, txId# 281474976715769 2026-02-19T08:12:13.083399Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486172619102297:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 6049, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715769 2026-02-19T08:12:13.083762Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:12:13.083872Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486172619102297:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 6049, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:13.083897Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:12:13.084178Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done 2026-02-19T08:12:13.084260Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 
72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486172619102297:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 6049, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:13.084273Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710675, subscribers count# 1 2026-02-19T08:12:13.084491Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710675 2026-02-19T08:12:13.084723Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" data_columns: "emb" data_columns: "data" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488732 } EndTime { seconds: 1771488733 } UserSID: "" } 2026-02-19T08:12:13.090460Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found 2026-02-19T08:12:13.090489Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037936 not found 2026-02-19T08:12:13.090503Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037935 not found 2026-02-19T08:12:13.090520Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037937 not found >> TBlobStorageProxyTest::TestSingleFailure >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin |90.2%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageProxyTest::TestInFlightPuts >> TBlobStorageProxyTest::TestDoubleEmptyGet |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |90.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestPartialGetBlock |90.2%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> TBlobStorageProxyTest::TestVPutVGetLimit >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdateFilterClusterChangeReturning-Covered [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdatePkClusterChangeReturning-Covered-Overlap [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::Interactive >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> TBlobStorageProxyTest::TestEmptyDiscover >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdatePkClusterChangeReturning+Covered+Overlap [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin >> KqpPrefixedVectorIndexes::PrefixedVectorIndexInsert-Returning+Covered [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdateFilterClusterChangeReturning-Covered [GOOD] Test command err: 2026-02-19T08:11:57.709079Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486108370017634:2218];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:57.712716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:58.043676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-02-19T08:11:58.047654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:58.047784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:58.051614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:58.132724Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:58.173833Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486108370017453:2081] 1771488717691555 != 1771488717691558 2026-02-19T08:11:58.290367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:58.290415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:58.290423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:58.290506Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:58.316797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:58.717815Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:58.729890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:58.737920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:58.792697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.992656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:59.168603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:59.244890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.222532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486125549888526:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.222685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.223134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486125549888536:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.223209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.575089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.611768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.655771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.703318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.740775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.780640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.813869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.868645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.965805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486125549889409:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.965924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.966060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486125549889414:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.966095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486125549889415:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.966192Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.971242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:01.988495Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486125549889418:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:02.093431Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486129844856765:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:02.708633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486108370017634:2218];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:02.708717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:03.885778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... TxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 6338, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715767 2026-02-19T08:12:14.307272Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:12:14.307343Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175290837919:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715766, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715767, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 6338, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:14.307514Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976710675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976715757 } Internal: true } TxId: 281474976715767 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:12:14.312928Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 281474976710675, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715767 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 
2026-02-19T08:12:14.313070Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175290837919:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715766, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715767, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 6338, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715767 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:12:14.314534Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:12:14.314631Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175290837919:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715766, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715767, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 6338, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:14.336395Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found 2026-02-19T08:12:14.336436Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037936 not found 2026-02-19T08:12:14.336455Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037937 not found 2026-02-19T08:12:14.338658Z node 2 :BUILD_INDEX INFO: 
schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976710675, txId# 281474976715767 2026-02-19T08:12:14.338792Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175290837919:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715766, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715767, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 6338, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715767 2026-02-19T08:12:14.339071Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:12:14.339157Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175290837919:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715766, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715767, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 6338, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:14.339179Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:12:14.339404Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done 2026-02-19T08:12:14.339470Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 
281474976710675 Done TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175290837919:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715766, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715767, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 6338, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:14.339483Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710675, subscribers count# 1 2026-02-19T08:12:14.347568Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710675 2026-02-19T08:12:14.347820Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488733 } EndTime { seconds: 1771488734 } UserSID: "" } 2026-02-19T08:12:14.358197Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037935 not found 2026-02-19T08:12:14.415331Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:12:16.464842Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdatePkClusterChangeReturning-Covered-Overlap [GOOD] Test command err: 2026-02-19T08:11:57.002973Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486105298641156:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:57.003037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:57.039401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:57.302975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:57.329130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:57.329244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:57.398132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:57.413775Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486101003673676:2081] 1771488716966057 != 1771488716966060 2026-02-19T08:11:57.419590Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:57.480150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:57.559609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:57.559630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:57.559641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:57.559740Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:58.014124Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:58.051047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:58.071656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:11:58.149004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.369196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.557647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.634902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.605581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118183544737:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.605778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.606266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118183544747:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.606316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.992032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.028285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.099454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.139560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.174794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.210183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.244839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.305949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.411613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486122478512918:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.411693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.411948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486122478512924:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.411967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.412007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486122478512923:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.418565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:01.434246Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486122478512927:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:01.499337Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486122478512978:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:02.006098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486105298641156:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:02.006158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... TxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 5570, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710769 2026-02-19T08:12:14.525260Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:12:14.525336Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175672765127:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 5570, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:14.525464Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976715675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976710757 } Internal: true } TxId: 281474976710769 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:12:14.526435Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 281474976715675, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710769 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:12:14.526568Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 
281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175672765127:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 5570, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710769 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:12:14.529134Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:12:14.529252Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175672765127:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 5570, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:14.531377Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976715675, txId# 281474976710769 2026-02-19T08:12:14.531487Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: 
[2:7608486175672765127:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 5570, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710769 2026-02-19T08:12:14.531786Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:12:14.531891Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175672765127:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 5570, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:14.531916Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:12:14.532155Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done 2026-02-19T08:12:14.532223Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486175672765127:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, 
ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2424 ReadRows: 90 ReadBytes: 2310 CpuTimeUs: 5570, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:14.532236Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715675, subscribers count# 1 2026-02-19T08:12:14.537046Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715675 2026-02-19T08:12:14.537283Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488733 } EndTime { seconds: 1771488734 } UserSID: "" } 2026-02-19T08:12:14.562419Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037937 not found 2026-02-19T08:12:14.562451Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037936 not found 2026-02-19T08:12:14.562465Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found 2026-02-19T08:12:14.562483Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037935 not found 2026-02-19T08:12:14.578463Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:12:16.759879Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill >> TStateStorageConfig::TestMultiReplicaFailDomains [GOOD] >> TStateStorageConfig::SameConfigurationTest [GOOD] >> TStateStorageConfig::Tablet72075186224040026Test [GOOD] >> TStateStorageConfig::NonDuplicatedNodesTest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestNormal >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::PrefixedVectorIndexUpdatePkClusterChangeReturning+Covered+Overlap [GOOD] Test command err: 2026-02-19T08:11:57.496810Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486107403903600:2219];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:57.497055Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:57.527217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:57.783634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:57.783769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:57.789347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:57.836053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:57.872779Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:57.937692Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:11:57.937730Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:11:58.033654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:58.033686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:58.033695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:58.033815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:58.070765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:58.495692Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:58.536765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:58.600168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.785475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:11:58.963830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:59.055968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.047801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486124583774462:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.047906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.048202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486124583774472:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.048254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.347808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.388838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.460555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.515401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.562631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.609415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.664845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.728135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.820474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486124583775347:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.820611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.820854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486124583775352:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.820914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486124583775353:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.821049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.825893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:01.837452Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486124583775356:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:01.899713Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486124583775407:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:02.496411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486107403903600:2219];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:02.496471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:03.730111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is u ... ws: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710775 2026-02-19T08:12:15.612157Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:12:15.612233Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486179249206120:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710774, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710775, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 351 UploadBytes: 9534 ReadRows: 300 ReadBytes: 8580 CpuTimeUs: 17800, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:15.612375Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976715675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976710757 } Internal: true } TxId: 281474976710775 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:12:15.613572Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 281474976715675, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710775 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:12:15.613728Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : 
TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486179249206120:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710774, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710775, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 351 UploadBytes: 9534 ReadRows: 300 ReadBytes: 8580 CpuTimeUs: 17800, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710775 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:12:15.614512Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:12:15.614643Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486179249206120:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710774, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710775, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 351 UploadBytes: 9534 ReadRows: 300 ReadBytes: 8580 CpuTimeUs: 17800, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:15.616815Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976715675, txId# 281474976710775 2026-02-19T08:12:15.616913Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, 
IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486179249206120:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710774, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710775, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 351 UploadBytes: 9534 ReadRows: 300 ReadBytes: 8580 CpuTimeUs: 17800, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710775 2026-02-19T08:12:15.617235Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:12:15.617338Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486179249206120:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710774, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710775, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 351 UploadBytes: 9534 ReadRows: 300 ReadBytes: 8580 CpuTimeUs: 17800, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:15.617382Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:12:15.617630Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done 2026-02-19T08:12:15.619814Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486179249206120:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: 
StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710774, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710775, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 351 UploadBytes: 9534 ReadRows: 300 ReadBytes: 8580 CpuTimeUs: 17800, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:15.620788Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715675, subscribers count# 1 2026-02-19T08:12:15.622316Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715675 2026-02-19T08:12:15.622560Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" data_columns: "emb" data_columns: "data" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488734 } EndTime { seconds: 1771488735 } UserSID: "" } 2026-02-19T08:12:15.623628Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037944 not found 2026-02-19T08:12:15.623655Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037943 not found 2026-02-19T08:12:15.623667Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037941 not found 2026-02-19T08:12:15.623682Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037942 not found 2026-02-19T08:12:17.581263Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> TBlobStorageProxyTest::TestSingleFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::PrefixedVectorIndexInsert-Returning+Covered [GOOD] Test command err: 2026-02-19T08:11:56.281584Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486101433270216:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:56.282326Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:56.566827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:56.596086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:56.596178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:56.691160Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486101433270174:2081] 1771488716277694 != 1771488716277697 2026-02-19T08:11:56.703235Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:56.709053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:56.840731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:56.862463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:56.862485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:56.862492Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:56.862598Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:57.309748Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:57.378236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:57.383876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:57.432221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:57.608580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:57.815630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:11:57.905406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:59.853596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486114318173941:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:59.853729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:59.854710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486114318173951:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:59.854780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.296941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.386050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.478195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.562430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.602668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.637991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.683876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.725900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.806865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118613142127:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.806956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.807299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118613142133:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.807358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118613142132:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.807469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.811223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:00.822142Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486118613142136:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:00.900347Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486118613142189:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:01.281892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486101433270216:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:01.281975Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:02.656636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608486127203077086:3981], Recipient [1:7608486101433270531:2147]: NKikimr::TE ... eWork, received event# 269877763, Sender [2:7608486183153311767:4772], Recipient [2:7608486153088536860:2155]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [2:7608486183153311767:4772] ServerId: [2:7608486183153311769:4774] } 2026-02-19T08:12:15.762686Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:12:15.762696Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset 2026-02-19T08:12:15.762734Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:12:15.762809Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done 2026-02-19T08:12:15.762907Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486183153310881:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 8617, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:12:15.762921Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 
281474976710675, subscribers count# 1 2026-02-19T08:12:15.762934Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:12:15.762960Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:12:15.763009Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [2:7608486183153310881:2561] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710675 at schemeshard: 72057594046644480 2026-02-19T08:12:15.763266Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486183153311498:4549], Recipient [2:7608486153088536860:2155]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037938 ClientId: [2:7608486183153311498:4549] ServerId: [2:7608486183153311504:4551] } 2026-02-19T08:12:15.763277Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found 2026-02-19T08:12:15.763279Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:12:15.763287Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037938, from:72057594046644480 is reset 2026-02-19T08:12:15.763295Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037936 not found 2026-02-19T08:12:15.763310Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037937 not found 2026-02-19T08:12:15.763327Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037935 not found 2026-02-19T08:12:15.763339Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486183153311260:4373], Recipient [2:7608486153088536860:2155]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037936 ClientId: [2:7608486183153311260:4373] ServerId: [2:7608486183153311271:4380] } 2026-02-19T08:12:15.763349Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:12:15.763356Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037936, from:72057594046644480 is reset 2026-02-19T08:12:15.763397Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486183153311261:4374], Recipient [2:7608486153088536860:2155]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037937 ClientId: [2:7608486183153311261:4374] ServerId: [2:7608486183153311283:4384] } 2026-02-19T08:12:15.763407Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:12:15.763413Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037937, from:72057594046644480 is reset 2026-02-19T08:12:15.763453Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486183153311256:4369], Recipient 
[2:7608486153088536860:2155]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037935 ClientId: [2:7608486183153311256:4369] ServerId: [2:7608486183153311272:4381] } 2026-02-19T08:12:15.763462Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:12:15.763468Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037935, from:72057594046644480 is reset 2026-02-19T08:12:15.767406Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274792450, Sender [2:7608486183153310881:2561], Recipient [2:7608486153088536860:2155]: NKikimrIndexBuilder.TEvGetRequest DatabaseName: "/Root" IndexBuildId: 281474976710675 2026-02-19T08:12:15.767451Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5442: StateWork, processing event TEvIndexBuilder::TEvGetRequest 2026-02-19T08:12:15.767543Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710675 2026-02-19T08:12:15.767793Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" data_columns: "emb" data_columns: "data" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488735 } EndTime { seconds: 1771488735 } UserSID: "" } 2026-02-19T08:12:15.767814Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:12:15.767871Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:12:15.768002Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [2:7608486183153310881:2561] msg type: 274792451 msg: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 281474976710675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" data_columns: "emb" data_columns: "data" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488735 } EndTime { seconds: 1771488735 } UserSID: "" } at schemeshard: 72057594046644480 2026-02-19T08:12:15.768265Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [2:7608486183153310884:4095], Recipient [2:7608486153088536860:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:12:15.768284Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:12:15.768293Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:12:15.803788Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:12:16.466041Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:7608486153088536860:2155]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:12:16.466083Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:12:16.466123Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [2:7608486153088536860:2155], Recipient [2:7608486153088536860:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:12:16.466139Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:12:16.738611Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:12:17.466819Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:7608486153088536860:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:12:17.466859Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:12:17.466905Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [2:7608486153088536860:2155], Recipient [2:7608486153088536860:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:12:17.466920Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:12:18.467646Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:7608486153088536860:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:12:18.467686Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:12:18.467729Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [2:7608486153088536860:2155], Recipient [2:7608486153088536860:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:12:18.467744Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2026-02-19T08:12:18.609302Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a97/r3tmp/tmpUlE1X1//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-02-19T08:12:18.634580Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 
0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> KqpImmediateEffects::UpdateOn [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin >> TBlobStorageProxyTest::TestVGetNoData |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestVPutVGet >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> TBlobStorageProxyTest::TestDoubleGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateOn [GOOD] Test command err: 2026-02-19T08:12:06.588035Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486143855043654:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:06.588134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:06.872499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:06.872610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:06.926657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:06.942673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:06.948841Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:07.139822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:07.188242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:07.188263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:07.188269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:07.188379Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:07.588057Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:07.690882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:07.705394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:07.771433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.966733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:08.132855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:08.222554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.177026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486161034914455:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.177166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.177672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486161034914465:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.177787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.767610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.812472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.864973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.916506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.965920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.031068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.096282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.189333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.275854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486165329882632:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.275944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.276391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486165329882637:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.276429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486165329882638:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.276547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.281107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:11.297304Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486165329882641:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:11.372578Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486165329882692:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:11.587267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486143855043654:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:11.587344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:12.987726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:14.965336Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486179422346451:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:14.965659Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:14.981045Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:15.084248Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:15.095066Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:15.095140Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:15.117113Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:15.278143Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:15.378353Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:15.378373Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:15.378381Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:15.378458Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:15.841614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:15.852347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:15.860663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:15.931301Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:16.065276Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:16.118578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:16.204170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.471310Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486196602217454:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.471409Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.471680Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486196602217464:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.471740Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.538702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.575915Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.605926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.634090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.665510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.714295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.762089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.832392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.953193Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486196602218342:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.953335Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.954871Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486196602218347:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.954941Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486196602218348:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.955048Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.959540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:18.971778Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486196602218351:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:19.052914Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486200897185698:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:19.962655Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486179422346451:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:19.962710Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:20.983377Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] Test command err: 2026-02-19T08:12:06.043166Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486145669420093:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:06.044131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:06.085117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:06.332023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:06.332216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:06.355875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:06.379569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:06.426376Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:06.428996Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486145669419969:2081] 1771488726028190 != 1771488726028193 2026-02-19T08:12:06.580778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:06.580802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:12:06.580809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:06.580902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:06.607335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:07.056123Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:07.229555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:07.240470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:07.307846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.500683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.718791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.784084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:09.808506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486158554323731:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:09.808628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:09.809148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486158554323741:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:09.809222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.288878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.333533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.366918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.424281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.458577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.499248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.537616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.591721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.675326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486162849291906:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.675425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.675660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486162849291911:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.675715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486162849291912:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.675819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.680142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:10.695726Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486162849291915:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:10.769582Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486162849291966:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:11.041646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486145669420093:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:11.041761Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... 1: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:14.604231Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:14.682340Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:14.683329Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486177895229569:2081] 1771488734582494 != 1771488734582497 2026-02-19T08:12:14.724632Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:14.724712Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:14.729037Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:14.786319Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:14.786346Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:14.786354Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:14.786443Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:14.814635Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:15.239421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:15.250242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:15.264661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:15.286445Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.012673s 2026-02-19T08:12:15.375028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:15.594196Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:15.610939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:15.741582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.286916Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195075100621:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.287015Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.287300Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195075100631:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.287356Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.360774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.408956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.457115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.495890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.539628Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.589665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.634390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.689969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.785239Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195075101500:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.785350Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.789094Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195075101505:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.789183Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195075101506:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.789307Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.793887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:18.809007Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486195075101509:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:18.887694Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486195075101560:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:19.588676Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486177895229614:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:19.588775Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:20.965426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscover >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestVGetNoData [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormal [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TSyncNeighborsTests::SerDes2 [GOOD] |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVPutVGet [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId >> KqpImmediateEffects::Interactive [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: 2026-02-19T08:12:00.990531Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486119132666945:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:00.991321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:01.295919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:01.296045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:01.297882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:01.366839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:01.470601Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486119132666884:2081] 1771488720979129 != 1771488720979132 2026-02-19T08:12:01.470880Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:01.584812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:01.616813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:01.616844Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:01.616851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:01.616922Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:01.995455Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:02.188271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:02.262798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:02.439555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:02.652291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:02.731668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:04.660504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486136312537943:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:04.660652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:04.669884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486136312537953:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:04.670021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.010704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.048284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.081642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.117171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.153120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.192513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.242604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.286457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.374076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486140607506122:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.374197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.374560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486140607506127:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.374599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486140607506128:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.374704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.378379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:05.389464Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486140607506131:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:05.491126Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486140607506182:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:05.989834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486119132666945:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:05.989939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:07.646589Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-02-19T08:12:07.666247Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Confl ... c_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:12:17.556074Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:17.556098Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:17.556105Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:17.556193Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:17.635175Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:18.028955Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:18.035868Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:18.043055Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.125160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.305053Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.354603Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:18.399320Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.361841Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486212029151315:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.362229Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.372761Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486212029151397:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.372866Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.393603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.447690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.493120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.537925Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.586909Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.635359Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.692502Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.757323Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:21.880967Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486212029152201:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.881085Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.881702Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486212029152206:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.881794Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486212029152207:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.881932Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:21.885845Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:21.899196Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486212029152210:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:21.975820Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486212029152261:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:22.326579Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486194849280354:2136];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:22.326661Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:24.714780Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [3:7608486224914054475:2544], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfa4tnfhnt07abwtrf51dy. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=M2RhZDJlOGMtNzc4ZjdiYWQtYzRmNWFhMjItNTQwMWYxYTY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-02-19T08:12:24.715670Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [3:7608486224914054476:2545], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01khtfa4tnfhnt07abwtrf51dy. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=M2RhZDJlOGMtNzc4ZjdiYWQtYzRmNWFhMjItNTQwMWYxYTY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7608486224914054472:2528], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-02-19T08:12:24.716093Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=M2RhZDJlOGMtNzc4ZjdiYWQtYzRmNWFhMjItNTQwMWYxYTY=, ActorId: [3:7608486224914054437:2528], ActorState: ExecuteState, LegacyTraceId: 01khtfa4tnfhnt07abwtrf51dy, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] Test command err: 2026-02-19T08:12:27.769928Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-02-19T08:12:27.770083Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:51: TEvQuerySyncToken, token sent (1): VDisk actor id# [0:1:1] actor id# [1:6:2053] active# 1 waiting# 0 >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease |90.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] Test command err: 2026-02-19T08:12:05.628994Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486140048299100:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:05.629615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:05.692341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:06.116449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:06.157052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:06.157224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:06.236168Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:06.239753Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486140048299059:2081] 1771488725576643 != 1771488725576646 2026-02-19T08:12:06.252316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:06.373803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:06.393251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:06.393272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:06.393279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:06.393372Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:06.622070Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:06.825197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:06.866380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:06.944839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.149896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.337787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.432287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:09.552724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486157228170127:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:09.552829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:09.553258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486157228170137:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:09.553293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:09.949499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.000850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.038983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.111621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.142021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.228673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.279949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.357239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.486060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486161523138308:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.486184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.490096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486161523138314:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.490165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486161523138315:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.490328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.494822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:10.508418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-02-19T08:12:10.508592Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486161523138319:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:10.601320Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486161523138370:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:10.623282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486140048299100:2078];send_to=[0:7307199536658146131:7762515]; 202 ... Disconnected -> Connecting 2026-02-19T08:12:14.005907Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486173687925885:2081] 1771488733732637 != 1771488733732640 2026-02-19T08:12:14.027407Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:14.033846Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:14.035904Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:14.270332Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:14.270355Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:14.270362Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:14.270440Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:14.454121Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:14.739692Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:14.942881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:14.964231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:15.044800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:15.273476Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:15.361222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.115123Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195162764240:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.115212Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.115625Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195162764250:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.115664Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.202642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.242188Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.287135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.350843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.439478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.505732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.584041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.638633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:18.734755Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486173687925936:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:18.734825Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:18.743481Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195162765125:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.743586Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.743906Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195162765131:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.743924Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486195162765130:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.743941Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:18.748071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:18.760740Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486195162765134:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:18.848300Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486195162765187:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:20.410840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:20.470075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:20.542806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: 2026-02-19T08:12:01.745292Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486125427446096:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:01.745337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:02.047278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:02.066843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:02.066938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:02.150320Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:02.156177Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486125427445857:2081] 
1771488721675503 != 1771488721675506 2026-02-19T08:12:02.167080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:02.345780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:02.378417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:02.378447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:02.378465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:02.378573Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:02.743903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:02.867176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:02.890112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:02.968555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:03.155256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:03.339182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:03.432053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.382156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486142607316922:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.382301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.383742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486142607316932:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.383829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:05.824258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.863435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.925356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:05.972052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:06.014116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:06.096145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:06.135941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:06.198321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:06.306471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486146902285113:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:06.306572Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:06.306946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486146902285118:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:06.306979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486146902285119:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:06.307103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:06.312137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:06.332263Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486146902285122:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:06.419569Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486146902285175:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:06.746213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486125427446096:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:06.746281Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:08.578789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... e.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:16.799711Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:19.067893Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:19.068026Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:19.149413Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:19.150261Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608486196247150874:2081] 1771488738985414 != 1771488738985417 2026-02-19T08:12:19.156277Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:19.156355Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:19.161021Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:19.337173Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:19.337195Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:19.337202Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:19.337281Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:19.359193Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:19.895815Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:19.902944Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:19.913291Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:20.033092Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:20.064803Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:20.317206Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:20.429851Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:22.955127Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486213427021922:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:22.955227Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:22.955533Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486213427021932:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:22.955575Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:23.035357Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.077197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.114281Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.154705Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.186442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.237915Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.296485Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.371051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.584590Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486217721990101:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:23.584712Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:23.585093Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486217721990106:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:23.585135Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486217721990107:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:23.585230Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:23.589588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:23.616145Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486217721990110:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:23.679794Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486217721990161:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:25.486869Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2026-02-19T08:12:29.051483Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-02-19T08:12:29.051615Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2026-02-19T08:12:29.051675Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:111: ProcessQueue(), token sent: VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 0 waiting# 1 2026-02-19T08:12:29.051714Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:122: ProcessQueue() done: active# 1 waiting# 0 2026-02-19T08:12:29.051759Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-02-19T08:12:29.156564Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-02-19T08:12:29.156693Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2026-02-19T08:12:29.156746Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:161: TEvReleaseSyncToken, removed from queue: VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 0 >> TSyncBrokerTests::ShouldEnqueue >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId >> TSyncNeighborsTests::SerDes3 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] >> TSyncBrokerTests::ShouldReturnToken 
>> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TSyncBrokerTests::ShouldReturnToken [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system >> TSyncBrokerTests::ShouldReleaseToken >> TSyncBrokerTests::ShouldReleaseToken [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2026-02-19T08:12:29.999957Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-02-19T08:12:30.000081Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2026-02-19T08:12:30.118961Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-02-19T08:12:30.119131Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2026-02-19T08:12:30.119205Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:82: TEvQuerySyncToken, enqueued (1): VDisk actor id# [0:1:2] actor id# [2:7:2054] active# 1 waiting# 1 >> TBlobStorageProxyTest::TestVBlockVPutVGet >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2026-02-19T08:12:30.184899Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-02-19T08:12:30.295854Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-02-19T08:12:30.295953Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 0 waiting# 0 >> KqpFail::OnPrepare >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink >> KqpWrite::UpsertNullKey >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink |90.2%| [TA] 
$(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> DataShardSnapshots::MvccSnapshotAndSplit >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/vector/ydb-core-kqp-ut-indexes-vector >> DataShardSnapshots::VolatileSnapshotSplit >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup >> KqpWrite::ProjectReplace+UseSink >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/vector/ydb-core-kqp-ut-indexes-vector >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin [GOOD] >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] >> KqpWrite::Insert >> TStateStorageConfig::NonDuplicatedNodesTest [GOOD] >> Cdc::NaN[TopicRunner] [GOOD] >> TStateStorageConfig::DuplicatedNodesTest >> Cdc::RacyRebootAndSplitWithTxInflight >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> Cdc::UpdateStream [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin [GOOD] >> Cdc::UpdateShardCount >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped >> KqpInplaceUpdate::SingleRowStr+UseSink >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system >> 
TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |90.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] Test command err: 2026-02-19T08:12:33.648688Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a85/r3tmp/tmpX5F8j1//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-02-19T08:12:33.649963Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a85/r3tmp/tmpX5F8j1//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-02-19T08:12:33.663817Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:12:33.663984Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |90.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/vector/ydb-core-kqp-ut-indexes-vector ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: 2026-02-19T08:12:05.837342Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486142041272792:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:05.837395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:05.864182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:06.203844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:06.204250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:06.205969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:06.210790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:06.392837Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:06.413328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:06.686266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:06.686293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:06.686299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:06.686386Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:06.834383Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:07.522062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:07.547628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:07.606510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.849444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:08.062843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:08.137936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.317018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486163516110894:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.317141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.317631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486163516110904:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.317673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.762316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.805882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.836969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486142041272792:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:10.837059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:10.856001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.919735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.972863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.062073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.102681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.154827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.253379Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486167811079076:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.253458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.253489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486167811079081:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.253868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486167811079083:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.253924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.256969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:11.269760Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486167811079084:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:11.334967Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486167811079136:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:13.003604Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026 ... ice] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:23.671531Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:23.671553Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:23.671559Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:23.671637Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:24.158352Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:24.496383Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:24.508917Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:24.512559Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:24.590105Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:24.777697Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:24.858378Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:12:27.940446Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486237464445572:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.940519Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.940761Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486237464445582:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.940790Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.024954Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.078294Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.123924Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.137880Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486220284574529:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:28.137938Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:28.181701Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.229089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.292459Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.369450Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.453800Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.603305Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486241759413751:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.603400Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.603828Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486241759413756:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.603868Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486241759413757:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.603972Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.608287Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:28.630351Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:12:28.631154Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486241759413760:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:28.710349Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486241759413811:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:31.172575Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [3:7608486254644316021:2543], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01khtfab7797e6ra1zjjtnqz77. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=OTE4OTNlN2ItZjA2NmZhZTctZDY3Y2YyNGItMTU0ODc0ZTc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-02-19T08:12:31.173448Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [3:7608486254644316022:2544], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01khtfab7797e6ra1zjjtnqz77. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=OTE4OTNlN2ItZjA2NmZhZTctZDY3Y2YyNGItMTU0ODc0ZTc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7608486254644316018:2529], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-02-19T08:12:31.174336Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=OTE4OTNlN2ItZjA2NmZhZTctZDY3Y2YyNGItMTU0ODc0ZTc=, ActorId: [3:7608486250349348689:2529], ActorState: ExecuteState, LegacyTraceId: 01khtfab7797e6ra1zjjtnqz77, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# >> Cdc::HugeKey[YdsRunner] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: 2026-02-19T08:12:06.982263Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486147060630167:2172];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:06.982319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:07.336181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:07.352963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:07.353074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:07.443101Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486147060630014:2081] 1771488726882099 != 1771488726882102 2026-02-19T08:12:07.461191Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:07.465944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:07.692310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:07.749702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:07.749752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:07.749759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:07.749840Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:07.987498Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:08.282875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:08.325968Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:08.580272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:08.771395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:08.865342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.122655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486168535468376:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.122811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.124707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486168535468385:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.124796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:11.594404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.671562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.708322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.789508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.838456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.873614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.910846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.963508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:11.970748Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486147060630167:2172];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:11.970821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:12.066327Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486172830436554:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:12.066413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:12.066669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486172830436559:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:12.066701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486172830436560:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:12.066808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:12.071315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:12.088573Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486172830436563:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:12.152359Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486172830436617:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:13.773188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:15.778427Z no ... rd__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:25.380162Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:25.498937Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:25.514524Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:25.682413Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:25.779800Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:28.901483Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486239857971918:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.901595Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.902334Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486239857971928:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:28.902400Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:29.011505Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:29.051476Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:29.088356Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:29.122452Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:29.161802Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:29.207573Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:29.261896Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:29.333054Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:29.402569Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486222678100909:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:29.402680Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:29.451478Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486244152940096:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:29.451582Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:29.451667Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486244152940101:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:29.455124Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:29.458135Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486244152940103:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:29.458893Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:29.472778Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486244152940104:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:29.544230Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486244152940158:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:31.490941Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:32.193742Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710677; 2026-02-19T08:12:32.206628Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:981: SelfId: [3:7608486257037842627:2568], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7608486252742875279:2568]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7608486257037842627:2568].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-02-19T08:12:32.207167Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:7608486257037842621:2568], SessionActorId: [3:7608486252742875279:2568], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7608486252742875279:2568]. 2026-02-19T08:12:32.207456Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=NTM4MDM2OGUtMWI3MWMzNGMtMTI4MWE2MzgtMTJiNTYyMTc=, ActorId: [3:7608486252742875279:2568], ActorState: ExecuteState, LegacyTraceId: 01khtfachq48fp9na0wz6t6yyx, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7608486257037842622:2568] from: [3:7608486257037842621:2568] trace_id# 2026-02-19T08:12:32.207581Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:7608486257037842622:2568] TxId: 281474976710677. Ctx: { TraceId: 01khtfachq48fp9na0wz6t6yyx, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NTM4MDM2OGUtMWI3MWMzNGMtMTI4MWE2MzgtMTJiNTYyMTc=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# [ {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 } {
: Error: Operation is aborting because locks are not valid, code: 2001 } ] trace_id# 2026-02-19T08:12:32.207953Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=NTM4MDM2OGUtMWI3MWMzNGMtMTI4MWE2MzgtMTJiNTYyMTc=, ActorId: [3:7608486252742875279:2568], ActorState: ExecuteState, LegacyTraceId: 01khtfachq48fp9na0wz6t6yyx, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 }{ message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } trace_id# >> KqpWrite::UpsertNullKey [GOOD] >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpWrite::OutOfSpace >> Cdc::HugeKey[TopicRunner] >> DataShardSnapshots::VolatileSnapshotMerge >> KqpWrite::ProjectReplace-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:30.146808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:30.146942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:30.147023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:30.147072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:30.147114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:30.147153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:30.147188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:30.147231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:30.147320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:30.147424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-02-19T08:10:30.148290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:30.148634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:30.269704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:30.269820Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:30.270606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:30.282474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:30.282593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:30.282751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:30.300789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:30.301496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:30.302305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:30.302690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:30.314452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:30.314679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:30.316701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:30.316780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:30.317025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:30.317081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:30.317135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:30.317491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:30.324781Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:30.453091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:30.453354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:30.453608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:30.453654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:30.453947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:30.454033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:30.456736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:30.457049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:30.457292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:30.457379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:30.457445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:30.457506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:30.459909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:30.459975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:30.460025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:30.461988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:30.462063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2026-02-19T08:10:30.462149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:30.462201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:30.466083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:30.468802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:30.469026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:30.470107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:30.470257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:30.470312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:30.470588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:30.470623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:30.470780Z node 1 :FLAT_TX_SCH ... 
as 3 2026-02-19T08:12:34.261409Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-02-19T08:12:34.261443Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-02-19T08:12:34.261474Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-02-19T08:12:34.261496Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-02-19T08:12:34.261521Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-02-19T08:12:34.273034Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:34.273133Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:34.273168Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:12:34.273200Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-02-19T08:12:34.273234Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-02-19T08:12:34.273892Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:34.273956Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:34.273981Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:12:34.274004Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-02-19T08:12:34.274032Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-02-19T08:12:34.275212Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2026-02-19T08:12:34.275294Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:34.275326Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:12:34.275357Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-02-19T08:12:34.275390Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-02-19T08:12:34.276441Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:34.276522Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:34.276555Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:12:34.276590Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-02-19T08:12:34.276622Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-02-19T08:12:34.276690Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-02-19T08:12:34.280006Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:12:34.280139Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:12:34.282113Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:12:34.282268Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-02-19T08:12:34.283814Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-02-19T08:12:34.283862Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-02-19T08:12:34.285751Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-02-19T08:12:34.285961Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-02-19T08:12:34.286008Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2679:4666] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-02-19T08:12:34.287469Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-02-19T08:12:34.287515Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-02-19T08:12:34.287674Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-02-19T08:12:34.287705Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-02-19T08:12:34.287771Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-02-19T08:12:34.287800Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-02-19T08:12:34.287861Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-02-19T08:12:34.287887Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-02-19T08:12:34.287945Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-02-19T08:12:34.287972Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-02-19T08:12:34.290018Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-02-19T08:12:34.290160Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-02-19T08:12:34.290206Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2682:4669] 2026-02-19T08:12:34.290520Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-02-19T08:12:34.290674Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-02-19T08:12:34.290733Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-02-19T08:12:34.290766Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2682:4669] 2026-02-19T08:12:34.290983Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-02-19T08:12:34.291042Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-02-19T08:12:34.291073Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2682:4669] 2026-02-19T08:12:34.291229Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-02-19T08:12:34.291298Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-02-19T08:12:34.291328Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2682:4669] 2026-02-19T08:12:34.291480Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-02-19T08:12:34.291515Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2682:4669] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: 2026-02-19T08:12:05.915983Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486140656685750:2225];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:05.916055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:06.317141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:06.353932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486140656685528:2081] 1771488725854265 != 1771488725854268 2026-02-19T08:12:06.367354Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:06.382034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:06.382159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:06.408485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:06.518387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:06.520579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:06.520602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:06.520613Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:06.520706Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:06.923072Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:07.135972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:07.222082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.253075Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.007333s 2026-02-19T08:12:07.408931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.626592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:07.718115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.114071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486162131523886:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.114288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.116103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486162131523896:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.116196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.537431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.582955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.624000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.698605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.739209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.776815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.819600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.867887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:10.917315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486140656685750:2225];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:10.917414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:10.974082Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486162131524769:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.974197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.978043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486162131524774:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.978124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486162131524775:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.978288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:10.984024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:11.002027Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486162131524778:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:11.131692Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486166426492127:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:12.683002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB cal ... OpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:23.729363Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:23.905399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:24.004508Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:26.930558Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486229761035117:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:26.930668Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:26.931277Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486229761035127:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:26.931349Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.012912Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:27.070776Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:27.123614Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:27.175094Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:27.224266Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:27.304055Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:27.354528Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:27.464639Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:27.591855Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486234056003292:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.591954Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.592046Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486234056003297:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.592321Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486234056003299:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.592371Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:27.596097Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:27.613584Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486234056003300:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:27.709173Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486234056003352:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:27.741745Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486212581164129:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:27.741841Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:29.782312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:30.504837Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=6; 2026-02-19T08:12:30.505077Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037927 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-02-19T08:12:30.505292Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037927 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-02-19T08:12:30.505575Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:981: SelfId: [3:7608486246940905815:2540], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7608486242645938267:2540]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7608486246940905815:2540].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2026-02-19T08:12:30.506390Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:7608486246940905783:2540], SessionActorId: [3:7608486242645938267:2540], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[3:7608486242645938267:2540]. 2026-02-19T08:12:30.506709Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=MzhjNzhhMzctMTMzMTYwZDMtNzFjMTYxMTktMTQ5M2IwMmE=, ActorId: [3:7608486242645938267:2540], ActorState: ExecuteState, LegacyTraceId: 01khtfaarg1jr4z7jdpkknf4hb, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7608486246940905810:2540] from: [3:7608486246940905783:2540] trace_id# 2026-02-19T08:12:30.506850Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:7608486246940905810:2540] TxId: 281474976710677. Ctx: { TraceId: 01khtfaarg1jr4z7jdpkknf4hb, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MzhjNzhhMzctMTMzMTYwZDMtNzFjMTYxMTktMTQ5M2IwMmE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# [ {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 } {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } ] trace_id# 2026-02-19T08:12:30.507332Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=MzhjNzhhMzctMTMzMTYwZDMtNzFjMTYxMTktMTQ5M2IwMmE=, ActorId: [3:7608486242645938267:2540], ActorState: ExecuteState, LegacyTraceId: 01khtfaarg1jr4z7jdpkknf4hb, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 }{ message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 12277331088732495150 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2026-02-19T08:11:57.800025Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:57.805690Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:57.809008Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:57.814793Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:57.815449Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:57.836588Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:58.030178Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:58.329904Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:58.342729Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:58.370576Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:58.383015Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:58.717542Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:58.718195Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:58.774544Z 1 00h02m39.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.067965Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.081702Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: 
(2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.108552Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.122316Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.183861Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.226648Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.227370Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.248239Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.267649Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.285572Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.376663Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.385048Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.397461Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.409420Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.482249Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.693348Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.712185Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.712847Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.738414Z 1 00h02m41.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.747703Z 1 00h02m41.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.843611Z 1 00h02m41.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:11:59.855365Z 1 00h02m41.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.149342Z 1 00h02m41.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.198991Z 1 
00h02m42.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.261203Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.263930Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.313472Z 1 00h02m42.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.335128Z 1 00h02m42.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.380247Z 1 00h02m42.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.552816Z 1 00h02m42.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.581331Z 1 00h02m42.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.594640Z 1 00h02m43.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.726476Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.726630Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.792034Z 1 00h02m43.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.958550Z 1 00h02m43.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.973803Z 1 00h02m43.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:00.992367Z 1 00h02m43.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.003555Z 1 00h02m43.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.013740Z 1 00h02m43.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.026473Z 1 00h02m43.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.038129Z 1 00h02m43.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.170792Z 1 00h02m44.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.281350Z 1 00h02m44.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.364806Z 1 00h02m44.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.379037Z 1 00h02m44.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.666288Z 1 00h02m44.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.676914Z 1 00h02m44.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.771743Z 1 00h02m44.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.787230Z 1 00h02m44.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.859813Z 1 00h02m45.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:01.987523Z 1 00h02m45.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:02.004251Z 1 00h02m45.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:02.029107Z 1 00h02m45.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:02.043560Z 1 00h02m45.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:02.248429Z 1 00h02m45.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:02.262154Z 1 00h02m45.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5335:707] 2026-02-19T08:12:02.491877Z 1 00h02m46.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [ ... 
k read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2026-02-19T08:12:20.459733Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:20.468530Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:20.489004Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:20.495735Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:20.496372Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:20.518762Z 8 00h20m54.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.050808Z 8 00h20m54.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.091153Z 8 00h20m54.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.119672Z 8 00h20m54.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.167384Z 8 00h20m55.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.191345Z 8 00h20m55.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.242263Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.244425Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.478499Z 8 00h20m55.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.514338Z 8 00h20m55.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.674240Z 8 00h20m55.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.856847Z 8 00h20m56.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.874294Z 8 00h20m56.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:21.909985Z 8 00h20m56.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.037300Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5384:756] 2026-02-19T08:12:22.039323Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.058802Z 8 00h20m56.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.204199Z 8 00h20m56.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.217837Z 8 00h20m56.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.399318Z 8 00h20m56.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.418130Z 8 00h20m57.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.432064Z 8 00h20m57.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.449796Z 8 00h20m57.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.587574Z 8 00h20m57.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.782400Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.783701Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.858556Z 8 00h20m57.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.926424Z 8 00h20m57.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.963841Z 8 00h20m57.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:22.980461Z 8 00h20m58.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.012040Z 8 00h20m58.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.285826Z 8 00h20m58.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.409829Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.411500Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.445703Z 8 00h20m58.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.545774Z 8 00h20m58.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.751414Z 8 00h20m58.912560s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.768513Z 8 00h20m59.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.874826Z 8 00h20m59.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.890120Z 8 00h20m59.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.914362Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:23.915448Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.125997Z 8 00h20m59.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.140757Z 8 00h20m59.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.373322Z 8 00h20m59.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.394975Z 8 00h21m00.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.559783Z 8 00h21m00.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.583767Z 8 00h21m00.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.645648Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.645849Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.769063Z 8 00h21m00.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:24.985992Z 8 00h21m00.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.020852Z 8 00h21m00.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.048228Z 8 00h21m00.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.075741Z 8 00h21m00.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.258098Z 8 00h21m01.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.272965Z 8 00h21m01.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.311811Z 8 00h21m01.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5384:756] 2026-02-19T08:12:25.331089Z 8 00h21m01.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.356855Z 8 00h21m01.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.408772Z 8 00h21m01.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.435536Z 8 00h21m01.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.513469Z 8 00h21m02.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.690998Z 8 00h21m02.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.721166Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] 2026-02-19T08:12:25.723099Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5384:756] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |90.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |90.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert |90.3%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink |90.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::ConflictingKeyR1WR2 |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::TopicAutoPartitioning |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |90.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> StreamCreator::Basic >> KqpVectorIndexes::VectorIndexUpsertClusterChange+Covered >> KqpVectorIndexes::OrderByCosineLevel2+Nullable-UseSimilarity >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> StreamCreator::WithResolvedTimestamps >> KqpVectorIndexes::VectorIndexInsertWithOverlap-Returning-Covered >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> Cdc::UpdateShardCount [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> KqpFail::OnPrepare [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> TStateStorageConfig::DuplicatedNodesTest [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> KqpVectorIndexes::VectorIndexDeleteOnReturning+Covered >> Cdc::UpdateRetentionPeriod >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> KqpVectorIndexes::VectorIndexUpsertClusterChange-Covered >> KqpVectorIndexes::VectorIndexDeleteOn+Covered >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] >> KqpWrite::ProjectReplace-UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> Cdc::HugeKey[TopicRunner] [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink >> StreamCreator::Basic [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> StreamCreator::WithResolvedTimestamps [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> Cdc::RacyActivateAndEnqueue >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> Cdc::HugeKeyDebezium |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |90.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TStateStorageConfig::DuplicatedNodesTest [GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2026-02-19T08:12:43.670489Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486302332512827:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:43.670531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:43.708877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:44.000578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:44.030158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:44.030276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:44.097387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:44.098181Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 
2026-02-19T08:12:44.106251Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486302332512565:2081] 1771488763636734 != 1771488763636737 2026-02-19T08:12:44.225492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:44.545278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:44.545302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:44.545308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:44.545387Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:44.669868Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:44.925171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:44.943452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.116100Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:12:45.116123Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:12:45.117354Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:12:47.333602Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771488765060, tx_id: 281474976710658 } } } 2026-02-19T08:12:47.334058Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:12:47.335581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.336285Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-02-19T08:12:47.336321Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-02-19T08:12:47.368009Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-02-19T08:12:47.368035Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:12:47.368760Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-02-19T08:12:47.450485Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7608486319512382897:2342] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-02-19T08:12:47.458289Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-02-19T08:12:47.458311Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-02-19T08:12:47.477607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:12:47.494185Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-02-19T08:12:47.494211Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2026-02-19T08:12:43.899326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486303277259329:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:43.899437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:43.989992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 
2026-02-19T08:12:44.388052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:44.388151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:44.404348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:44.417619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:44.433977Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486303277259167:2081] 1771488763853821 != 1771488763853824 2026-02-19T08:12:44.455340Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:44.644191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:44.906054Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:44.914377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:44.914400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:44.914406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:44.914539Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:45.286917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:45.311288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.460707Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:12:45.460731Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 
2026-02-19T08:12:45.461350Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:12:47.628525Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771488765417, tx_id: 281474976710658 } } } 2026-02-19T08:12:47.629159Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:12:47.630966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.632572Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-02-19T08:12:47.632592Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-02-19T08:12:47.660385Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-02-19T08:12:47.660419Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:12:47.661073Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-02-19T08:12:47.736023Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7608486320457129501:2342] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-02-19T08:12:47.741590Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-02-19T08:12:47.741623Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-02-19T08:12:47.756599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:12:47.768066Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-02-19T08:12:47.768103Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> 
KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] Test command err: 2026-02-19T08:12:32.126532Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486256962610176:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:32.127158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:32.536827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:32.536950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:32.588951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:32.620160Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:32.621054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486256962610137:2081] 1771488752114117 != 1771488752114120 2026-02-19T08:12:32.626164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:32.777465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:32.777487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:32.777501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:32.777579Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:32.788307Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:33.157886Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:33.341184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:33.348600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:33.405973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:33.566938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:33.760634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:33.847524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.127415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486274142481198:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:36.127513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:36.130248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486274142481208:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:36.130371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:36.513965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.547329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.590124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.661740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.704858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.754338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.793691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.883334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.022147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486278437449381:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.022231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.022627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486278437449386:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.022703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486278437449387:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.022851Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.032994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:37.050939Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486278437449390:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:37.118333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486256962610176:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:37.118402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:37.130956Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486278437449443:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:38.698694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... chemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:40.051564Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486291147694815:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:40.051659Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:40.064744Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:40.141477Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:40.177137Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:40.177202Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:40.180069Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:40.227096Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:40.227125Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:40.227132Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:40.227210Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:40.331133Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:40.586567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:40.592167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:40.603675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:40.658345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:40.829447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:40.920830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:41.081453Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:43.511977Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486304032598522:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:43.512060Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:43.512821Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486304032598532:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:43.512895Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:43.569086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.606897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.646691Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.690668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.746835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.794950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.852405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.919203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:44.022797Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486308327566697:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.022882Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.023308Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486308327566702:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.023350Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486308327566703:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.023440Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.027216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:44.039132Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486308327566706:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:44.100791Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486308327566758:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:45.053898Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486291147694815:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:45.054651Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:45.996483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2026-02-19T08:12:34.960683Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a81/r3tmp/tmpp3cfY3//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-02-19T08:12:35.032433Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# 
true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:12:36.669159Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a81/r3tmp/tmpp3cfY3//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-02-19T08:12:36.754507Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:12:38.458515Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a81/r3tmp/tmpp3cfY3//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-02-19T08:12:38.485076Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:12:40.065729Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a81/r3tmp/tmpp3cfY3//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 4 2026-02-19T08:12:40.074449Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 4 VDISK[0:_:0:3:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:12:42.356860Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a81/r3tmp/tmpp3cfY3//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 5 2026-02-19T08:12:42.415628Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 5 VDISK[0:_:0:4:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:12:43.793503Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:415} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003a81/r3tmp/tmpp3cfY3//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 6 2026-02-19T08:12:43.837010Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 6 VDISK[0:_:0:5:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 
LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: 2026-02-19T08:12:32.326132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486259158860338:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:32.326193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:32.809823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:32.835401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:32.835492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:32.932761Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486259158860113:2081] 1771488752288231 != 1771488752288234 2026-02-19T08:12:32.951747Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:32.963348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:32.994433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:33.261663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:33.261680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty 
maybe) 2026-02-19T08:12:33.261685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:33.261780Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:33.285041Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:34.022898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:34.034755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:34.118239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:34.399627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:34.583684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:34.678040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.898783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486276338731177:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:36.898890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:36.899294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486276338731187:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:36.899334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.235967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.283496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.319708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.322095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486259158860338:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:37.322664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:37.351884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.381946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.427584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.460112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.500840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.576674Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486280633699355:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.576747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.576977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486280633699361:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.577008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486280633699360:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.577036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.580159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:37.590263Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486280633699364:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:37.694078Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486280633699415:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:40.735157Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486289727592735:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:40.735197Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:40.777519Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:40.897517Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:40.905884Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486289727592705:2081] 1771488760732269 != 1771488760732272 2026-02-19T08:12:40.920484Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:40.920565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:40.932484Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:41.039609Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:41.064388Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:41.064405Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:41.064409Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:41.064470Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:41.668639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:41.674540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:41.687761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:41.753301Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:41.783026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:41.976918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:42.074717Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:44.811437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486306907463759:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.811553Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.811922Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486306907463769:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.811992Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:44.887582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:44.937398Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:44.986503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.032223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.085167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.145657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.227145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.287625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.400527Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486311202431937:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.400642Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.401016Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486311202431942:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.401063Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486311202431943:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.401180Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.405419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:45.422578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:12:45.428679Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486311202431946:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:45.531167Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486311202431997:3752] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:45.735227Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486289727592735:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:45.735296Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2026-02-19T08:10:19.225520Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:10:19.288411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:10:19.288521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:10:19.310513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:10:19.376002Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:19.481044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:10:19.661508Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608485680047678705:2144] Handle TEvNavigate describe path dc-1 2026-02-19T08:10:19.661563Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608485684342646450:2432] HANDLE EvNavigateScheme dc-1 2026-02-19T08:10:19.661684Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608485680047678711:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:10:19.661822Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608485684342646229:2291][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608485680047678711:2146], cookie# 1 2026-02-19T08:10:19.663503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485684342646288:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485684342646285:2291], cookie# 1 2026-02-19T08:10:19.663574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485684342646289:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7608485684342646286:2291], cookie# 1 2026-02-19T08:10:19.663604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608485684342646290:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485684342646287:2291], cookie# 1 2026-02-19T08:10:19.663642Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485680047678349:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485684342646288:2291], cookie# 1 2026-02-19T08:10:19.663673Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485680047678352:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485684342646289:2291], cookie# 1 2026-02-19T08:10:19.663689Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608485680047678355:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608485684342646290:2291], cookie# 1 2026-02-19T08:10:19.663771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485684342646288:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485680047678349:2050], cookie# 1 2026-02-19T08:10:19.663793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485684342646289:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485680047678352:2053], cookie# 1 2026-02-19T08:10:19.663807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608485684342646290:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485680047678355:2056], cookie# 1 2026-02-19T08:10:19.663846Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485684342646229:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485684342646285:2291], cookie# 1 2026-02-19T08:10:19.663880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608485684342646229:2291][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:10:19.663922Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485684342646229:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485684342646286:2291], cookie# 1 2026-02-19T08:10:19.663947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608485684342646229:2291][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:10:19.663985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608485684342646229:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608485684342646287:2291], cookie# 1 2026-02-19T08:10:19.663998Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608485684342646229:2291][/dc-1] Sync cookie mismatch: sender# [1:7608485684342646287:2291], cookie# 1, current cookie# 0 2026-02-19T08:10:19.664062Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608485680047678711:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:10:19.669898Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608485680047678711:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608485684342646229:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:10:19.670052Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608485680047678711:2146], cacheItem# { Subscriber: { Subscriber: [1:7608485684342646229:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:10:19.672856Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608485684342646451:2433], recipient# [1:7608485684342646450:2432], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:10:19.672951Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608485684342646450:2432] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:10:19.744271Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608485684342646450:2432] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:10:19.747511Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608485684342646450:2432] Handle TEvDescribeSchemeResult Forward to# [1:7608485684342646449:2431] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 
72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:10:19.752539Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608485680047678705:2144] Handle TEvProposeTransaction 2026-02-19T08:10:19.752567Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608485680047678705:2144] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:10:19.752624Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608485680047678705:2144] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608485684342646456:2437] 2026-02-19T08:10:19.769897Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:10:19.903078Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608485684342646456:2437] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-02-19T08:10:19.903143Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608485684342646456:2437] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:10:19.903203Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608485684342646456:2437] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:10:19.903322Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEv ... 
ger/delayed_requests] Ignore empty state: owner# [6:7608486300758325589:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:12:46.972549Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7608486300758325589:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:12:46.972715Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7608486300758325589:2105], cacheItem# { Subscriber: { Subscriber: [6:7608486317938195204:2338] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:12:46.972805Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7608486300758325589:2105], cacheItem# { Subscriber: { Subscriber: [6:7608486317938195205:2339] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:12:46.972960Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7608486317938195244:2342], recipient# [6:7608486317938195200:2320], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-02-19T08:12:46.973189Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7608486317938195200:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:12:47.053547Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7608486317938195205:2339][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7608486317938195212:2339] 2026-02-19T08:12:47.053631Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7608486317938195205:2339][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [6:7608486300758325589:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:12:47.053673Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7608486317938195204:2338][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7608486317938195206:2338] 2026-02-19T08:12:47.053735Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7608486317938195204:2338][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [6:7608486300758325589:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:12:47.053758Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7608486317938195204:2338][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7608486317938195207:2338] 2026-02-19T08:12:47.053767Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7608486317938195205:2339][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7608486317938195213:2339] 2026-02-19T08:12:47.053788Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7608486317938195204:2338][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [6:7608486300758325589:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:12:47.053796Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7608486317938195205:2339][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [6:7608486300758325589:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:12:47.053810Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7608486317938195204:2338][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7608486317938195208:2338] 2026-02-19T08:12:47.053817Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:814: [main][6:7608486317938195205:2339][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [6:7608486317938195214:2339] 2026-02-19T08:12:47.053854Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7608486317938195205:2339][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [6:7608486300758325589:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:12:47.053896Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7608486317938195204:2338][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [6:7608486300758325589:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:12:47.192216Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7608486300758325589:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:12:47.192381Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7608486300758325589:2105], cacheItem# { Subscriber: { Subscriber: [6:7608486317938195204:2338] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:12:47.192432Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7608486300758325589:2105], cacheItem# { Subscriber: { Subscriber: [6:7608486317938195205:2339] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:12:47.192566Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7608486322233162547:2343], recipient# 
[6:7608486317938195200:2320], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-02-19T08:12:47.192874Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7608486317938195200:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:31.210186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:31.210273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:31.210339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:31.210377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:31.210406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:31.210432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:31.210478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:31.210508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:31.210557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:31.210613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:31.211321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:31.211579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:31.351559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:31.351649Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:31.352528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:31.364846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:31.364979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:31.365167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:31.377053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:31.378685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:31.379510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:31.379865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:31.385499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:31.385693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:31.387832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:31.387918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:31.388231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:31.388280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:31.388326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:31.388621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.396244Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:31.521338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:31.521635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.521887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:31.521945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: 
TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:31.522192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:31.522258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:31.525030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:31.525343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:31.525582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.525661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:31.525701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:31.525780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:31.528196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.528280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:31.528340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:31.530821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.530883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.530927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:31.530999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:31.534867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:31.537074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:31.537294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:31.538472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:31.538621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:31.538681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:31.538953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:31.539005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:31.539184Z node 1 :FLAT_TX_SCH ... as 3 2026-02-19T08:12:41.660793Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-02-19T08:12:41.660828Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-02-19T08:12:41.660858Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-02-19T08:12:41.660885Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-02-19T08:12:41.660910Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-02-19T08:12:41.662337Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:41.662453Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:41.662494Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:12:41.662534Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 127], version: 6 2026-02-19T08:12:41.662576Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-02-19T08:12:41.663755Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:41.663848Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:41.663883Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:12:41.663916Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-02-19T08:12:41.663955Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-02-19T08:12:41.665525Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:41.665618Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:41.665653Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:12:41.665685Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-02-19T08:12:41.665735Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-02-19T08:12:41.666633Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:41.666716Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:12:41.666750Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:12:41.666786Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 130], version: 3 2026-02-19T08:12:41.666821Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-02-19T08:12:41.666897Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-02-19T08:12:41.670224Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:12:41.672960Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:12:41.673072Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:12:41.673191Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-02-19T08:12:41.674799Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-02-19T08:12:41.674847Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-02-19T08:12:41.676741Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-02-19T08:12:41.676858Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-02-19T08:12:41.676898Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2681:4668] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-02-19T08:12:41.680834Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-02-19T08:12:41.680889Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-02-19T08:12:41.680988Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-02-19T08:12:41.681017Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-02-19T08:12:41.681110Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-02-19T08:12:41.681138Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-02-19T08:12:41.681199Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-02-19T08:12:41.681224Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 
2026-02-19T08:12:41.681284Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-02-19T08:12:41.681310Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-02-19T08:12:41.683694Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-02-19T08:12:41.683896Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-02-19T08:12:41.684037Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-02-19T08:12:41.684080Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2684:4671] 2026-02-19T08:12:41.684243Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-02-19T08:12:41.684300Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-02-19T08:12:41.684329Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2684:4671] 2026-02-19T08:12:41.684516Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-02-19T08:12:41.684610Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-02-19T08:12:41.684660Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-02-19T08:12:41.684688Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2684:4671] 2026-02-19T08:12:41.684837Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-02-19T08:12:41.684864Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2684:4671] 2026-02-19T08:12:41.684972Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-02-19T08:12:41.685005Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2684:4671] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [FAIL] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin |90.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] Test command err: 2026-02-19T08:12:33.891191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486259860428144:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:33.892091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:34.337813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:34.348046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:34.348122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:34.430268Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:34.431775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:34.437845Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486259860428105:2081] 1771488753885108 != 1771488753885111 2026-02-19T08:12:34.600547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:34.658308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:34.658337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:34.658344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:34.658461Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:34.914251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:35.357571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:35.543279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:35.775867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:35.977516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.056043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.819174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486277040299168:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.819273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.819475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486277040299178:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.819526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.144267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.180486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.237175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.271133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.321252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.360463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.439180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.517330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.608266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486281335267355:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.608374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.609760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486281335267360:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.609837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486281335267361:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.609950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.614053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:38.625282Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486281335267364:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:38.729912Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486281335267415:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:38.891626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486259860428144:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:38.891695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:40.352342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:42.242125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:42.268467Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:42.373810Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:42.446472Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:42.453389Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486301634249779:2081] 1771488762166153 != 1771488762166156 2026-02-19T08:12:42.457570Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:42.457665Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:42.473030Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:42.647488Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:42.669475Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:42.669497Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:42.669502Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:42.669565Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:43.117223Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:43.123605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:43.132438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.185005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.199467Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:43.392360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.485393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.918197Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486314519153559:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.918297Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.918611Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486314519153569:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.918659Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:45.994273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.032497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.068874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.114577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.176274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.224062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.264373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.316660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.407325Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486318814121737:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:46.407433Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:46.407721Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486318814121743:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:46.407811Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:46.407823Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486318814121742:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:46.411914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:46.424065Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486318814121746:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:46.489169Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486318814121797:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:48.151898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> KqpWrite::InsertRevert [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |90.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |90.4%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> StreamCreator::TopicAutoPartitioning [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> KqpVectorIndexes::VectorIndexUpsertClusterChangeReturning-Covered >> KqpVectorIndexes::OrderByCosineOnlyVectorCovered-Nullable-Overlap >> KqpVectorIndexes::EmptyVectorIndexUpdate-Partitioned-Overlap >> KqpVectorIndexes::OrderByCosineLevel2WithCover+Nullable >> TTopicApiDescribes::DescribeConsumer >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin >> KqpVectorIndexes::BadFormat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: 2026-02-19T08:12:34.969100Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486264924929262:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:34.969505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:35.014575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:35.427945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:35.428086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:35.464197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:35.487338Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:35.489027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486264924929209:2081] 1771488754945315 != 1771488754945318 2026-02-19T08:12:35.494924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:35.618925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:35.618970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:35.618978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:35.619144Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:35.631444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:36.000524Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:36.061858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:36.130370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.287966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.473853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.559751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.680212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486282104800267:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.680346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.680793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486282104800277:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.680871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.027536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.061420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.104255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.127507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.153769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.181349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.211698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.251058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.317268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486286399768448:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.317342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.317506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486286399768454:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.317557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486286399768453:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.317582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.320741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:39.332483Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486286399768457:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:39.401948Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486286399768508:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:39.966042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486264924929262:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:39.966110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:40.801939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ty ... error=scheme_cache_undelivered_message; 2026-02-19T08:12:43.036113Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:43.170513Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:43.170602Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:43.190272Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:43.197988Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486299660799751:2081] 1771488763007068 != 1771488763007071 2026-02-19T08:12:43.211534Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:43.212156Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:43.293548Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:43.293570Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:43.293576Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:43.293664Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:43.607194Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:43.890613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:43.902045Z 
node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:43.908824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:43.975466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:44.077245Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:44.207335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:44.296092Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.954268Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486316840670814:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:46.954369Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:46.955361Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486316840670823:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:46.955417Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.024771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.058071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.097330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.172135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.204971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.239673Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.281787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.330140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.410515Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486321135638992:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.410592Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.410807Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486321135638997:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.410841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486321135638998:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.410879Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.414101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:47.425419Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486321135639001:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:47.488491Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486321135639052:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:48.008364Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486303955767087:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:48.008421Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:49.033397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest >> KqpWrite::OutOfSpace [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> KqpVectorIndexes::CoveredVectorIndexWithFollowers+StaleRO >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> KqpVectorIndexes::VectorIndexUpsertClusterChange+Covered [GOOD] >> KqpVectorIndexes::VectorIndexUpdatePkClusterChangeReturning-Covered-Overlap >> KqpVectorIndexes::VectorIndexInsertWithOverlap-Returning-Covered [GOOD] >> KqpVectorIndexes::VectorIndexInsertWithOverlap-Returning+Covered >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::TopicAutoPartitioning [GOOD] Test command err: 2026-02-19T08:12:43.496932Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486306150063666:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:43.496998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:43.706470Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:43.732501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:43.732580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:43.841862Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486306150063536:2081] 1771488763481518 != 1771488763481521 2026-02-19T08:12:43.858990Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:43.867615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:43.872123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:44.248737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:44.248768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:44.248775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:44.248851Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:44.503900Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:44.765262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:44.778092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:44.788438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:44.962845Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-02-19T08:12:47.268688Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7608486323329933801:2338] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-02-19T08:12:47.277175Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: 
[StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-02-19T08:12:47.277204Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-02-19T08:12:47.289343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:12:47.298084Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-02-19T08:12:47.298105Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# 2026-02-19T08:12:48.094975Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486327925083944:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:48.095057Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:48.112797Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:48.237892Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486327925083905:2081] 1771488768093681 != 1771488768093684 2026-02-19T08:12:48.248436Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:48.264995Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:48.265091Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:48.269375Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:48.399721Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:48.486445Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:48.486472Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:48.486481Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:48.486549Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:48.738338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:48.749874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.783707Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-02-19T08:12:49.097958Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:51.395854Z node 2 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][2:7608486340809986864:2338] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-02-19T08:12:51.402797Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-02-19T08:12:51.402835Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-02-19T08:12:51.413532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:12:51.429440Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-02-19T08:12:51.429479Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# >> TTopicApiDescribes::GetPartitionDescribe >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::OutOfSpace [GOOD] Test command err: 2026-02-19T08:12:33.820939Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486260191546803:2135];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:33.820974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:34.181391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:34.181505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:34.219516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:34.267145Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486260191546700:2081] 1771488753816451 != 1771488753816454 2026-02-19T08:12:34.277184Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:34.284899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:34.405756Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:34.426580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:34.426603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:34.426610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:34.426735Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:34.853870Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:34.899216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:34.919065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:34.999112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:35.153557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:35.341242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:35.424701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.527171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486277371417769:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.527263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.527637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486277371417779:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.527701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.913398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.965737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.990605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.016628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.053192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.094312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.131146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.184164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.261433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486281666385949:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.261548Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.261667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486281666385954:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.261846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486281666385956:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.261990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.266555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:38.277855Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486281666385958:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:38.350324Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486281666386009:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:38.821002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486260191546803:2135];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:38.821067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ... waiting for SysViewsRoster update finished 2026-02-19T08:12:44.221793Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution ... node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:44.771132Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:12:45.201465Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:45.201529Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:45.201561Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:45.201906Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:45.213145Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:45.548885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:45.615189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.889071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.349592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:46.657460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.271489Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1911:3516], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.271852Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.273023Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1987:3537], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.273289Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.302347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.509983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.776509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.076841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.349195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.627995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.913555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.224765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.615970Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2801:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.616447Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.617424Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2805:4183], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.617529Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.617628Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2808:4186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.624354Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:49.798354Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2810:4188], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:49.873235Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:2865:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_DISK_GROUP_OUT_OF_SPACE;details=;tx_id=3; 2026-02-19T08:12:51.912106Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:889: SelfId: [2:3174:4457], Table: `/Root/KeyValue` ([72057594046644480:42:1]), SessionActorId: [2:3149:4457]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[2:3174:4457]. Ignored this error.{
: Error: , code: 2006 } 2026-02-19T08:12:51.912745Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [2:3168:4457], SessionActorId: [2:3149:4457], statusCode=UNAVAILABLE. Issue=
: Error: Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`., code: 2036
: Error: , code: 2006 . sessionActorId=[2:3149:4457]. 2026-02-19T08:12:51.913246Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=2&id=Zjc0NTYzNjAtNTBjOGQ3ODEtZWUzYTYyNTctMzI0MWIwZjk=, ActorId: [2:3149:4457], ActorState: ExecuteState, LegacyTraceId: 01khtfazq8aewrb0qgfrxzwqdy, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [2:3169:4457] from: [2:3168:4457] trace_id# 2026-02-19T08:12:51.913421Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:3169:4457] TxId: 281474976715673. Ctx: { TraceId: 01khtfazq8aewrb0qgfrxzwqdy, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc0NTYzNjAtNTBjOGQ3ODEtZWUzYTYyNTctMzI0MWIwZjk=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`., code: 2036 subissue: {
: Error: , code: 2006 } } trace_id# 2026-02-19T08:12:51.913805Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=2&id=Zjc0NTYzNjAtNTBjOGQ3ODEtZWUzYTYyNTctMzI0MWIwZjk=, ActorId: [2:3149:4457], ActorState: ExecuteState, LegacyTraceId: 01khtfazq8aewrb0qgfrxzwqdy, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`." issue_code: 2036 severity: 1 issues { issue_code: 2006 severity: 1 } } trace_id# >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin >> KqpVectorIndexes::VectorIndexDeleteOn+Covered [GOOD] >> KqpVectorIndexes::VectorIndexDeleteOn-Covered >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser >> KqpVectorIndexes::VectorIndexDeleteOnReturning+Covered [GOOD] >> KqpVectorIndexes::VectorIndexDeleteOnReturning-Covered >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> KqpVectorIndexes::VectorIndexUpsertClusterChange-Covered [GOOD] >> KqpVectorIndexes::VectorIndexUpsertClusterChangeReturning+Covered |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TTopicApiDescribes::DescribeTopic |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |90.4%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser >> TBSV::CleanupDroppedVolumesOnRestart >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert >> KqpPrefixedVectorIndexes::CosineDistanceWithPkSuffix-Nullable-Covered [GOOD] >> KqpPrefixedVectorIndexes::CosineDistanceWithPkSuffixWithOverlap+Covered >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser >> TBSV::ShardsNotLeftInShardsToDelete >> TBSV::CreateBlockStoreVolumeDirect >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system >> TBSV::ShouldLimitBlockStoreVolumeDropRate >> 
KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] >> TBSV::CreateBlockStoreVolumeDirect [GOOD] >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:12:57.244416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:12:57.244504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:12:57.244571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:12:57.244623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:12:57.244658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:12:57.244704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:12:57.244740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:12:57.244764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:12:57.244843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:12:57.244913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:12:57.245699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:12:57.246073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:12:57.340496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:12:57.340565Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:57.358873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:12:57.359680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:12:57.359886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:12:57.371396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:12:57.372053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:12:57.372740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:12:57.373029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:12:57.377159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:57.377343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:12:57.378576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:12:57.378640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:57.378745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:12:57.378810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:12:57.378909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:12:57.379096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:12:57.534809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.535900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: 
EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:57.536896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
EBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2026-02-19T08:12:59.080214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2026-02-19T08:12:59.080241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2026-02-19T08:12:59.080262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-02-19T08:12:59.080285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-02-19T08:12:59.080307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-02-19T08:12:59.080334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-02-19T08:12:59.080355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-02-19T08:12:59.080381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-02-19T08:12:59.080402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-02-19T08:12:59.080424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-02-19T08:12:59.080472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-02-19T08:12:59.080510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-02-19T08:12:59.080550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-02-19T08:12:59.080573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-02-19T08:12:59.080594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-02-19T08:12:59.080615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-02-19T08:12:59.080637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-02-19T08:12:59.080658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-02-19T08:12:59.080680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId 
[OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-02-19T08:12:59.080701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-02-19T08:12:59.080722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-02-19T08:12:59.080755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-02-19T08:12:59.080789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-02-19T08:12:59.080813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-02-19T08:12:59.080838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:12:59.080861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-02-19T08:12:59.080913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:12:59.080942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:12:59.080964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:12:59.081122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.081207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.081314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.081449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.081555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.081807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.082150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.082303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.082757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.082831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at 
schemeshard: 72057594046678944 2026-02-19T08:12:59.083111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.083312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.083391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.083540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.083729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.083811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.084217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.084506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.084585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.084636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.084783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.084845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.084938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.117602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:12:59.123393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:12:59.123478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:59.123956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:12:59.124014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:12:59.124069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:12:59.124354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:875:2830] sender: [1:938:2058] recipient: [1:15:2062] 2026-02-19T08:12:59.164825Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:12:59.165032Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 257us result status StatusPathDoesNotExist 2026-02-19T08:12:59.165181Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CreateBlockStoreVolumeDirect [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:12:58.882407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:12:58.882504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:12:58.882583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:12:58.882621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:12:58.882668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:12:58.882715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:12:58.882751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:12:58.882780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: 
ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:12:58.882846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:12:58.882930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:12:58.883748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:12:58.884059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:12:58.975861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:12:58.975916Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:58.988304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:12:58.988902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:12:58.989137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:12:59.000068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:12:59.000754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:12:59.001407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:12:59.001703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:12:59.004632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:59.004813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:12:59.006034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:12:59.006096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:59.006203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:12:59.006253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:12:59.006347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:12:59.006557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-02-19T08:12:59.018829Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:245:2058] recipient: [1:15:2062] 2026-02-19T08:12:59.176453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:12:59.176697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.176952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:12:59.177007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:12:59.177244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:12:59.177321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:59.179922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:12:59.180140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:12:59.180352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.180437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:12:59.180480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:12:59.180528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:12:59.182817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.182893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:12:59.182942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:12:59.185092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.185159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.185204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:12:59.185254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:12:59.189096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:12:59.191444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:12:59.191642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:12:59.192681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:12:59.192821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:12:59.192874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:12:59.193148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:12:59.193210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:12:59.193377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-02-19T08:12:59.193459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: sch ... 
ARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:12:59.545386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 2 2026-02-19T08:12:59.545806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.545850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:12:59.545972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:12:59.546029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:12:59.546069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:12:59.546104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:12:59.546156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:12:59.546195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:12:59.546235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:12:59.546265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:12:59.546430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-02-19T08:12:59.546476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2026-02-19T08:12:59.546505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-02-19T08:12:59.546537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2026-02-19T08:12:59.547121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:12:59.547206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:12:59.547243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:12:59.547279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: 
AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-02-19T08:12:59.547317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:12:59.548001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:12:59.548082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:12:59.548123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:12:59.548149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2026-02-19T08:12:59.548174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-02-19T08:12:59.548229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-02-19T08:12:59.548285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:280:2268] 2026-02-19T08:12:59.647758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:12:59.647878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:12:59.647955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:12:59.647998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:281:2269] TestWaitNotification: OK eventTxId 101 2026-02-19T08:12:59.648471Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolumeDirect" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:12:59.648706Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolumeDirect" took 258us result status StatusSuccess 2026-02-19T08:12:59.649133Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/BSVolumeDirect" PathDescription { Self { Name: "BSVolumeDirect" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeBlockStoreVolume 
CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 BSVVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } BlockStoreVolumeDescription { Name: "BSVolumeDirect" PathId: 2 VolumeConfig { BlockSize: 4096 Partitions { BlockCount: 16 } Version: 1 TabletVersion: 3 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } VolumeExplicitChannelProfiles { PoolKind: "pool-kind-2" } VolumeExplicitChannelProfiles { PoolKind: "pool-kind-2" } VolumeExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Partitions { PartitionId: 0 TabletId: 72075186233409546 } VolumeTabletId: 72075186233409547 AlterVersion: 1 MountToken: "" TokenVersion: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:12:59.649725Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolumeDirect" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:12:59.649974Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolumeDirect" took 209us result status StatusSuccess 2026-02-19T08:12:59.650355Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/BSVolumeDirect" PathDescription { Self { Name: "BSVolumeDirect" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeBlockStoreVolume CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 BSVVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } BlockStoreVolumeDescription { Name: "BSVolumeDirect" PathId: 2 VolumeConfig { BlockSize: 4096 Partitions { BlockCount: 16 } Version: 1 TabletVersion: 3 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } VolumeExplicitChannelProfiles { PoolKind: "pool-kind-2" } VolumeExplicitChannelProfiles { PoolKind: "pool-kind-2" } VolumeExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Partitions { PartitionId: 0 TabletId: 72075186233409546 } VolumeTabletId: 72075186233409547 AlterVersion: 1 MountToken: "" TokenVersion: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] Test command err: 2026-02-19T08:12:35.208836Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486272225049263:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:35.208944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:35.654239Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:35.668287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:35.668408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:35.773885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486272225049101:2081] 1771488755133944 != 1771488755133947 2026-02-19T08:12:35.776340Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:35.782547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:35.981106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:35.981133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:12:35.981141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:35.981249Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:35.985001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:36.198489Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:36.503667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:36.510719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:36.565298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.797624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.066299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.206551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.923283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486285109952869:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.923386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.923804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486285109952879:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:38.923850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.304591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.337190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.369001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.424611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.460839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.504091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.549202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.602474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.676999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486289404921051:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.677069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.677386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486289404921057:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.677400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486289404921058:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.677458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:39.681388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:39.692113Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486289404921061:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:39.797021Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486289404921112:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:40.208333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486272225049263:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:40.208411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:41.279157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... 026-02-19T08:12:50.688590Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608486335349829961:2081] 1771488770471094 != 1771488770471097 2026-02-19T08:12:50.722240Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:50.722319Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:50.734443Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:50.825718Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:50.945897Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:50.945919Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:50.945926Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:50.946005Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:51.506123Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:51.535548Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:51.560268Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:51.667839Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:51.851401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:51.948418Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.650505Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486352529701023:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:54.650618Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:54.650980Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486352529701033:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:54.651052Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:54.755388Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.800899Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.838342Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.885329Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.932796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.994937Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:55.046638Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:55.139809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:55.241560Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486356824669197:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:55.241668Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:55.242057Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486356824669202:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:55.242107Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486356824669203:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:55.242246Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:55.245965Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2026-02-19T08:12:55.267019Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486356824669206:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:55.367958Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486356824669257:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:55.498913Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486335349830119:2174];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:55.498995Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:57.669626Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.013440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:12:58.334738Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=NTJlYzJmZGQtMzQ5Zjc2YWItZjQ1ODg1MDgtYTU1ZjRkZWE=, ActorId: [3:7608486365414604140:2529], ActorState: ExecuteState, LegacyTraceId: 01khtfb5sz5aew9w40g8sgf59d, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Tables: `/Root/TestTable`." 
issue_code: 2001 severity: 1 } trace_id# >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> TMiniKQLEngineFlatTest::TestEmptyProgram >> KqpVectorIndexes::BadFormat [GOOD] >> KqpVectorIndexes::BuildIndexTimesAndUser >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:12:34.366575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:34.458913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:12:34.476497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:34.476583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:12:34.477086Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:12:34.794032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:34.794210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:34.945165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488751452102 != 1771488751452106 2026-02-19T08:12:34.957418Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:35.006844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:35.141857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:12:35.634715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:35.634786Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:35.634843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:35.635240Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:35.648641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:35.996215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:36.070662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.378272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:36.794551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.123107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.962542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1910:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:37.962744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:37.963952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:37.964190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.995816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.212913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.481431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.770739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.023693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.277342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.556286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:39.859844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:40.251682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:40.251814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:40.252250Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:40.252334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:40.252409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2802:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:40.258473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2026-02-19T08:12:40.439078Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2804:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:40.492470Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2864:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:43.526455Z node 1 :RPC_REQUEST WARN: rpc_execute_query.cpp:487: Client lost 2026-02-19T08:12:43.527222Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:3179:4457] TxId: 281474976715674. Ctx: { TraceId: 01khtfapn34dnsp2m14f4z9dm0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2IwZDg1NGMtNGJhNWFkMi0yYjQ0YTM3YS0xYTUzNzViYQ==, PoolId: default, IsStreamingQuery: ... 344471188542:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:52.484452Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:52.518233Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:52.588453Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:52.589901Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608486344471188500:2081] 1771488772483512 != 1771488772483515 2026-02-19T08:12:52.630094Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:52.630181Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:52.635627Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:52.730325Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:52.730360Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:52.730367Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:52.730456Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:52.771762Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:53.258731Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:53.267304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:53.278492Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:53.350304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:53.513341Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:53.513617Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:53.590589Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:56.703043Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486361651059572:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:56.703122Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:56.703520Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486361651059582:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:56.703585Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.852229Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:56.916584Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.022406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.078173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.134777Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.200817Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.266082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.355288Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.485882Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486344471188542:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:57.485945Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:57.501890Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486365946027745:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:57.501975Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:57.509889Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486365946027750:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:57.509994Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486365946027751:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:57.510135Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2026-02-19T08:12:57.520134Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2026-02-19T08:12:57.538872Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486365946027754:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:57.624771Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486365946027808:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:59.843717Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration >> KqpVectorIndexes::EmptyVectorIndexUpdate-Partitioned-Overlap [GOOD] >> KqpVectorIndexes::EmptyVectorIndexUpdate-Partitioned+Overlap >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult |90.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:12:59.540775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:12:59.540912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:12:59.540979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:12:59.541014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:12:59.541045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:12:59.541100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:12:59.541129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:12:59.541158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:12:59.541241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:12:59.541329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:12:59.542246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:12:59.542592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:12:59.638324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:12:59.638383Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:59.650393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:12:59.651034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:12:59.651230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:12:59.662788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:12:59.663473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:12:59.664379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:12:59.664687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:12:59.667765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:59.667940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:12:59.669173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:12:59.669237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:59.669340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:12:59.669391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:12:59.669495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:12:59.669685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:12:59.842390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.843458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.843587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.843671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.843816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.843877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.843942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.844034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.844131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.844211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.844276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.844353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.844438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:59.844506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000064, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:02.517650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000064 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:02.517702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_bsv.cpp:40: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000064, at schemeshard: 72057594046678944 2026-02-19T08:13:02.517847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2026-02-19T08:13:02.517994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-02-19T08:13:02.518034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:13:02.518097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-02-19T08:13:02.518134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:13:02.518187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:13:02.518246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2026-02-19T08:13:02.518290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-02-19T08:13:02.518330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:13:02.518362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-02-19T08:13:02.518393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 129:0 2026-02-19T08:13:02.518510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2026-02-19T08:13:02.518545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-02-19T08:13:02.518578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 57 2026-02-19T08:13:02.518609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 49], 18446744073709551615 2026-02-19T08:13:02.536250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:24 2026-02-19T08:13:02.536312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe 
to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2026-02-19T08:13:02.536451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:23 2026-02-19T08:13:02.536507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2026-02-19T08:13:02.537626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-02-19T08:13:02.537781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:13:02.537817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:13:02.538016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:02.538046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:02.538191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 49] 2026-02-19T08:13:02.541123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:02.541187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-02-19T08:13:02.541227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 129, path id: 49 FAKE_COORDINATOR: Erasing txId 129 2026-02-19T08:13:02.541791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:13:02.541905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:13:02.541959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:13:02.541998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 49], version: 18446744073709551615 2026-02-19T08:13:02.542038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2026-02-19T08:13:02.542395Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:13:02.542441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 49], at schemeshard: 72057594046678944 2026-02-19T08:13:02.542506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:13:02.542928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:13:02.543015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:13:02.543042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:13:02.543068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 57 2026-02-19T08:13:02.543096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:13:02.543161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-02-19T08:13:02.543528Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 2026-02-19T08:13:02.544055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2026-02-19T08:13:02.547861Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 2026-02-19T08:13:02.548572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2026-02-19T08:13:02.551886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-02-19T08:13:02.559364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:13:02.559507Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-02-19T08:13:02.559603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:24 2026-02-19T08:13:02.559669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2026-02-19T08:13:02.560364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2026-02-19T08:13:02.560417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2026-02-19T08:13:02.561217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-02-19T08:13:02.561331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-02-19T08:13:02.561384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:2091:3958] TestWaitNotification: OK eventTxId 129 |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [FAIL] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects >> KqpVectorIndexes::VectorIndexUpsertClusterChangeReturning-Covered [GOOD] >> KqpVectorIndexes::VectorResolveDuplicateEvent >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards >> Cdc::SupportedTypes [GOOD] >> Cdc::StringEscaping >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> KqpVectorIndexes::VectorIndexInsertWithOverlap-Returning+Covered [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks >> KqpVectorIndexes::VectorIndexUpdatePkClusterChangeReturning-Covered-Overlap [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics 
>> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |90.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> KqpVectorIndexes::VectorIndexDeleteOn-Covered [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous >> KqpVectorIndexes::VectorIndexDeleteOnReturning-Covered [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::VectorIndexInsertWithOverlap-Returning+Covered [GOOD] Test command err: 2026-02-19T08:12:44.098934Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486306834451693:2168];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:44.099005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:44.407062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:44.415368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:44.415464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:44.525298Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486306834451562:2081] 1771488764045366 != 1771488764045369 2026-02-19T08:12:44.541817Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:44.544047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:44.662622Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:44.716752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:44.716774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:44.716780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:44.718638Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:45.109896Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:45.205972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:45.226389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:45.288673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.489851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.662772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.747564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.612997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486319719355329:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.613123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.613441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486319719355339:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.613497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.012137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.058140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.090950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.125778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.178289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.210879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.243292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.289534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.365092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486324014323506:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.365182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.365256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486324014323511:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.365576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486324014323513:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.365632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.369172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:48.380568Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486324014323514:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:48.439946Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486324014323566:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:49.098724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486306834451693:2168];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:49.098811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:50.069474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... UILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:01.201700Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486375631349506:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715772, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 106 UploadBytes: 2674 ReadRows: 120 ReadBytes: 2570 CpuTimeUs: 10060, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:01.217935Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976710675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976715757 } Internal: true } TxId: 281474976715772 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:13:01.219849Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 281474976710675, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715772 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:01.219957Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 
72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486375631349506:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715772, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 106 UploadBytes: 2674 ReadRows: 120 ReadBytes: 2570 CpuTimeUs: 10060, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715772 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:01.220678Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:01.220769Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486375631349506:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 106 UploadBytes: 2674 ReadRows: 120 ReadBytes: 2570 CpuTimeUs: 10060, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:01.226956Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976710675, txId# 281474976715772 2026-02-19T08:13:01.227118Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486375631349506:2561], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 106 UploadBytes: 2674 ReadRows: 120 ReadBytes: 2570 CpuTimeUs: 10060, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715772 2026-02-19T08:13:01.227847Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037939 not found 2026-02-19T08:13:01.227865Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found 2026-02-19T08:13:01.227877Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037940 not found 2026-02-19T08:13:01.227914Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037937 not found 2026-02-19T08:13:01.228129Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:01.228221Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486375631349506:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 106 UploadBytes: 2674 ReadRows: 120 ReadBytes: 2570 CpuTimeUs: 10060, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:01.228240Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:13:01.229078Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done 2026-02-19T08:13:01.229158Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done TBuildInfo{ IndexBuildId: 281474976710675, Uid: , 
DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486375631349506:2561], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715771, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715772, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 106 UploadBytes: 2674 ReadRows: 120 ReadBytes: 2570 CpuTimeUs: 10060, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:01.229169Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710675, subscribers count# 1 2026-02-19T08:13:01.235591Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710675 2026-02-19T08:13:01.235874Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710675 Issues { message: "TShardStatus { ShardIdx: 72057594046644480:51 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index1" index_columns: "emb" data_columns: "data" data_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488780 } EndTime { seconds: 1771488781 } UserSID: "" } 2026-02-19T08:13:01.273817Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:13:01.969871Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::VectorIndexUpdatePkClusterChangeReturning-Covered-Overlap [GOOD] Test command err: 2026-02-19T08:12:44.152415Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486307831636474:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:44.153300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:44.537978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:44.576871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:44.576955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:44.628895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:44.672341Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486307831636423:2081] 1771488764118426 != 1771488764118429 2026-02-19T08:12:44.676843Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:44.783617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:44.807698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:44.807722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:44.807729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:44.807881Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:45.156770Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:45.348988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:45.452098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.605213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.789904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.881090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.817574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486320716540190:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.817726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.818087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486320716540200:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.818160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.207228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.238507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.282597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.320319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.356960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.390152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.420676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.464039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.535114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486325011508373:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.535195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.535315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486325011508378:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.535375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486325011508380:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.535417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.538542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:48.548760Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486325011508382:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:48.672707Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486325011508433:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:49.120639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486307831636474:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:49.120716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:50.322575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.691734Z no ... LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5415, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715766 2026-02-19T08:13:01.225253Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:01.225350Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486377327416327:2564], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5415, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:01.225476Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976710675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: 
"TestTable" } LockGuard { OwnerTxId: 281474976715757 } Internal: true } TxId: 281474976715766 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:13:01.228201Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2026-02-19T08:13:01.228264Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2026-02-19T08:13:01.236294Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 281474976710675, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:01.236439Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486377327416327:2564], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5415, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:01.237457Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:01.237579Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486377327416327:2564], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, 
UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5415, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:01.240217Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976710675, txId# 281474976715766 2026-02-19T08:13:01.240343Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486377327416327:2564], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5415, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715766 2026-02-19T08:13:01.240688Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:01.240775Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486377327416327:2564], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5415, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:01.240797Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:13:01.240998Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done 2026-02-19T08:13:01.241065Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486377327416327:2564], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5415, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:01.241077Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710675, subscribers count# 1 2026-02-19T08:13:01.241581Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710675 2026-02-19T08:13:01.241834Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index1" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488780 } EndTime { seconds: 1771488781 } UserSID: "" } 2026-02-19T08:13:01.256211Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2026-02-19T08:13:01.290424Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:13:02.909324Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill >> KqpVectorIndexes::VectorIndexUpsertClusterChangeReturning+Covered [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::VectorIndexDeleteOn-Covered [GOOD] Test command err: 2026-02-19T08:12:46.170190Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486315402591350:2198];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:46.170462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:46.212210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:46.489668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:46.489817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:46.520253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:46.530297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:46.544482Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486315402591179:2081] 1771488766140492 != 1771488766140495 2026-02-19T08:12:46.559594Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:46.694428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:46.694452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:46.694458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:46.694531Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:46.697287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:47.136460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:47.145456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:47.178092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:47.199125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.342463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.489939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.557661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.596519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486328287494954:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.596626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.596918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486328287494964:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.596981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.918897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.981518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.018992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.075147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.132889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.198340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.283842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.363546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.459975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486332582463137:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.460028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.460126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486332582463142:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.460248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486332582463144:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.460271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.464521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:50.488536Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486332582463146:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:50.556717Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486332582463197:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:51.173865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486315402591350:2198];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:51.174327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486387469129382:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5355, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710765 2026-02-19T08:13:03.062967Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:13:03.063065Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486387469129382:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5355, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.063199Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976715675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976710757 } Internal: true } TxId: 281474976710765 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:13:03.064274Z node 2 
:BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 281474976715675, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710765 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:03.064418Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486387469129382:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710765, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5355, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710765 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:03.065283Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:13:03.065368Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486387469129382:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5355, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.073069Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976715675, txId# 281474976710765 2026-02-19T08:13:03.073231Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486387469129382:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5355, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710765 2026-02-19T08:13:03.075199Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:13:03.075339Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486387469129382:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5355, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.075369Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:13:03.075649Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done 2026-02-19T08:13:03.075732Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, 
State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486387469129382:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5355, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.075746Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715675, subscribers count# 1 2026-02-19T08:13:03.076803Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715675 2026-02-19T08:13:03.077040Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index1" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488782 } EndTime { seconds: 1771488783 } UserSID: "" } 2026-02-19T08:13:03.077347Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2026-02-19T08:13:03.077367Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2026-02-19T08:13:03.077403Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::VectorIndexDeleteOnReturning-Covered [GOOD] Test command err: 2026-02-19T08:12:46.418940Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486316789050505:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:46.419932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:46.697551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:46.704832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:46.704983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-02-19T08:12:46.772297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:46.776128Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:46.779289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486316789050464:2081] 1771488766415304 != 1771488766415307 2026-02-19T08:12:46.885103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:46.894287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:46.894313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:46.894320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:46.894421Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:47.385967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:47.404821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:47.428676Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:47.449058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.609147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.761872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.821789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.588414Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486329673954234:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.588917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.589217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486329673954244:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.589274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.977428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.017668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.060416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.114379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.183588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.246528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.289236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.371714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.499777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486333968922418:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.499859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.500109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486333968922423:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.500138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486333968922424:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.500221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.504287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:50.513504Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486333968922427:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:50.581201Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486333968922480:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:51.420037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486316789050505:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:51.420125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:52.242064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391443137955:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5014, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715766 2026-02-19T08:13:03.394963Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:03.395026Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391443137955:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5014, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.395142Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976710675 Unlocking Transaction { WorkingDir: "/Root" 
OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976715757 } Internal: true } TxId: 281474976715766 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:13:03.395979Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 281474976710675, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:03.396080Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391443137955:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5014, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:03.396638Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:03.396735Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391443137955:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5014, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.402951Z node 2 :BUILD_INDEX INFO: 
schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976710675, txId# 281474976715766 2026-02-19T08:13:03.403074Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391443137955:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5014, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715766 2026-02-19T08:13:03.403314Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:03.403396Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391443137955:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5014, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.403440Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:13:03.403679Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done 2026-02-19T08:13:03.403752Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done TBuildInfo{ IndexBuildId: 
281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391443137955:2562], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 5014, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.403762Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710675, subscribers count# 1 2026-02-19T08:13:03.410076Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710675 2026-02-19T08:13:03.410300Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index1" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488783 } EndTime { seconds: 1771488783 } UserSID: "" } 2026-02-19T08:13:03.425278Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2026-02-19T08:13:03.433181Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2026-02-19T08:13:03.433227Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:12:58.656204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:12:58.656297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:12:58.656366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:12:58.656401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:12:58.656433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:12:58.656492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:12:58.656521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:12:58.656544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:12:58.656636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:12:58.656707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:12:58.657495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:12:58.658276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:12:58.748791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:12:58.748840Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:58.760253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:12:58.760875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:12:58.761064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:12:58.774319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:12:58.774945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:12:58.775602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:12:58.775898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:12:58.778822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:58.778982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:12:58.780182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-02-19T08:12:58.780241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:12:58.780340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:12:58.780392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:12:58.780488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:12:58.780670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:12:58.940484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.941468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.941608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.941691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.941786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.941846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.941949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.942033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.942131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.942221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.942296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.942353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:12:58.942440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:12:58.942511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 78944, LocalPathId: 38] was 2 2026-02-19T08:13:06.336187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:13:06.336227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:13:06.336281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:13:06.336315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:13:06.336379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:13:06.336443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:13:06.336497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:13:06.336568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:13:06.336609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:13:06.336653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:13:06.336791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:13:06.336837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:13:06.336880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:13:06.336948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:13:06.354320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:13:06.354426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:13:06.354790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:13:06.354851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:13:06.355461Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-02-19T08:13:06.355623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:13:06.355663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:13:06.355850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:06.355884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:06.356077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:13:06.356256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:06.356296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:13:06.356340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:13:06.422558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:13:06.422715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:13:06.422764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:13:06.422808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:13:06.422871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:13:06.423518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:13:06.423659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:13:06.423897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:13:06.424317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:13:06.424391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:13:06.424427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:13:06.424474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:13:06.424513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:13:06.424591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:13:06.424930Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2026-02-19T08:13:06.425273Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2026-02-19T08:13:06.425363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:06.425628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:13:06.435320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:13:06.443921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:13:06.450160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:13:06.450408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:13:06.451005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, 
wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:13:06.451496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:13:06.451544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:13:06.451986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:13:06.452103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:13:06.452156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:795:2772] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2026-02-19T08:13:06.452931Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-02-19T08:13:06.453033Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:33.764160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:33.764271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:33.764341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:33.764375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:33.764408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:33.764442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:33.764476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:33.764531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:33.764603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:33.764683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:33.765495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:33.765807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:33.882794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:33.882858Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:33.883624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:33.900094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:33.900194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:33.900348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:33.911904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:33.912542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:33.913286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:33.913561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:33.918614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:33.918786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:33.920482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:33.920542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:33.920740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:33.920799Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:33.920858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:33.921115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.929844Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:34.111487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:34.111748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.111940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:34.111988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:34.112205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:34.112285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:34.116901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:34.117133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:34.117306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.117368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:34.117403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:34.117463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:34.119203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.119250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:34.119326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:34.120939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.120989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.121028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.121078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:34.124181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:34.125839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:34.126042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:34.126844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:34.126960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:34.127017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.127253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:34.127309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.127459Z node 1 :FLAT_TX_SCH ... 
as 3 2026-02-19T08:13:03.455949Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-02-19T08:13:03.455980Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-02-19T08:13:03.456011Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-02-19T08:13:03.456037Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-02-19T08:13:03.456063Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-02-19T08:13:03.466133Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:03.466236Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:03.466277Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:13:03.466314Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-02-19T08:13:03.466352Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-02-19T08:13:03.467185Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:03.467266Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:03.467299Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:13:03.467330Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-02-19T08:13:03.467362Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-02-19T08:13:03.468760Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2026-02-19T08:13:03.468839Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:03.468871Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:13:03.468902Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-02-19T08:13:03.468933Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-02-19T08:13:03.470043Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:03.470128Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:03.470161Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:13:03.470191Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-02-19T08:13:03.470228Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-02-19T08:13:03.470311Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-02-19T08:13:03.479034Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:13:03.479202Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:13:03.483125Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:13:03.483276Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-02-19T08:13:03.484763Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-02-19T08:13:03.484805Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-02-19T08:13:03.486649Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-02-19T08:13:03.486765Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-02-19T08:13:03.486821Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2679:4666] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-02-19T08:13:03.488249Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-02-19T08:13:03.488298Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-02-19T08:13:03.488382Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-02-19T08:13:03.488408Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-02-19T08:13:03.488486Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-02-19T08:13:03.488514Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-02-19T08:13:03.488570Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-02-19T08:13:03.488596Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-02-19T08:13:03.488653Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-02-19T08:13:03.488678Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-02-19T08:13:03.490854Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-02-19T08:13:03.491012Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-02-19T08:13:03.491050Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2682:4669] 2026-02-19T08:13:03.491373Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-02-19T08:13:03.491528Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-02-19T08:13:03.491582Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-02-19T08:13:03.491611Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2682:4669] 2026-02-19T08:13:03.491774Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-02-19T08:13:03.491824Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-02-19T08:13:03.491852Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2682:4669] 2026-02-19T08:13:03.492030Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-02-19T08:13:03.492083Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-02-19T08:13:03.492110Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2682:4669] 2026-02-19T08:13:03.492260Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-02-19T08:13:03.492288Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2682:4669] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:04.936391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:04.936484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:04.936559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:04.936601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:04.936633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:04.936687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:04.936721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:04.936744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:04.936817Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:04.936889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:04.946047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:04.946481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:05.080291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:05.080366Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:05.094557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:05.095315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:05.095488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:05.107654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:05.108420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:05.109134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:05.109487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:05.112829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:05.113047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:05.114281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:05.114354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:05.114466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:05.114508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:05.114547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:05.114714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:05.270709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.271766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.271888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.271986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:05.272866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-02-19T08:13:06.672725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:576: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-02-19T08:13:06.672786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:228: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:06.672828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:581: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-02-19T08:13:06.673036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 105:0 Propose -> Done 2026-02-19T08:13:06.673216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:13:06.681865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:13:06.682351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:06.682402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:13:06.682756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:06.682820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-02-19T08:13:06.683263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:13:06.683318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 105:0 ProgressState 2026-02-19T08:13:06.683426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-02-19T08:13:06.683471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:13:06.683521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-02-19T08:13:06.683563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:13:06.683601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-02-19T08:13:06.683638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation 
IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:13:06.683676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-02-19T08:13:06.683709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 105:0 2026-02-19T08:13:06.683845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:13:06.683884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-02-19T08:13:06.683916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-02-19T08:13:06.684602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:13:06.684695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:13:06.684732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:13:06.684788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-02-19T08:13:06.684824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:13:06.684916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-02-19T08:13:06.691227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:811:2776] 2026-02-19T08:13:06.694739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:13:06.694844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:13:06.694880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1018:2941] TestWaitNotification: OK eventTxId 105 2026-02-19T08:13:06.695550Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:13:06.695871Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/USER_1/Topic1" took 301us result status StatusSuccess 2026-02-19T08:13:06.696783Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 2 ChildPartitionIds: 3 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 4 ChildPartitionIds: 5 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { ToBound: "?" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "?" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\277" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "?" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "?" 
ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\277" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPopulatorQuorumTest::OneWriteOnlyRingGroup >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] >> TTopicApiDescribes::DescribeConsumer [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::VectorIndexUpsertClusterChangeReturning+Covered [GOOD] Test command err: 2026-02-19T08:12:46.349511Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486318813029483:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:46.351582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:46.655955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:46.674076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:46.674185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:46.746351Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486318813029440:2081] 1771488766347335 != 1771488766347338 2026-02-19T08:12:46.753519Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:46.757903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:46.869521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:46.869541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:46.869548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:46.869616Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:46.956635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:47.341726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:47.354419Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:47.392361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.540518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.698515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.760470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.455216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486331697933214:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.455350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.455623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486331697933224:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.455672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:49.741257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.828089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.859609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.893410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.930908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:49.994058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.053632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.190357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:50.295947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486335992901392:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.296054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.296340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486335992901397:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.296389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486335992901398:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.296480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:50.300909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:50.331277Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486335992901401:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:50.402370Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486335992901454:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:51.349330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486318813029483:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:51.398450Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:52.042360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:52.325246Z no ... one: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 514 ReadRows: 50 ReadBytes: 650 CpuTimeUs: 4983, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715766 2026-02-19T08:13:03.823306Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:03.823371Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391547620772:2566], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 514 ReadRows: 50 ReadBytes: 650 CpuTimeUs: 4983, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.823493Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976710675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976715757 } Internal: true } TxId: 281474976715766 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:13:03.824389Z node 2 :BUILD_INDEX INFO: 
schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 281474976710675, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:03.824486Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391547620772:2566], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 514 ReadRows: 50 ReadBytes: 650 CpuTimeUs: 4983, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976710675, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:03.825097Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:03.825176Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391547620772:2566], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 514 ReadRows: 50 ReadBytes: 650 CpuTimeUs: 4983, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.832196Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976710675, txId# 281474976715766 2026-02-19T08:13:03.832327Z 
node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391547620772:2566], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 514 ReadRows: 50 ReadBytes: 650 CpuTimeUs: 4983, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976715766 2026-02-19T08:13:03.833508Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking 2026-02-19T08:13:03.833629Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Unlocking TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391547620772:2566], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 514 ReadRows: 50 ReadBytes: 650 CpuTimeUs: 4983, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.833652Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:13:03.834764Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2026-02-19T08:13:03.834780Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2026-02-19T08:13:03.834793Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 
2026-02-19T08:13:03.834901Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done 2026-02-19T08:13:03.834983Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710675 Done TBuildInfo{ IndexBuildId: 281474976710675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, DataColumns: data, DataColumns: emb, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486391547620772:2566], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 514 ReadRows: 50 ReadBytes: 650 CpuTimeUs: 4983, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:03.834994Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710675, subscribers count# 1 2026-02-19T08:13:03.838394Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710675 2026-02-19T08:13:03.838647Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index1" index_columns: "emb" data_columns: "data" data_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488783 } EndTime { seconds: 1771488783 } UserSID: "" } 2026-02-19T08:13:03.903109Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:13:04.816303Z node 2 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill >> TPopulatorTest::Boot >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... 
waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-02-19T08:13:08.908982Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-02-19T08:13:08.926305Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-02-19T08:13:08.926443Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... waiting for updates from replica populators 2026-02-19T08:13:08.928659Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-02-19T08:13:08.928738Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-02-19T08:13:08.928786Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-02-19T08:13:08.928866Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-02-19T08:13:08.928897Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-02-19T08:13:08.928921Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-02-19T08:13:08.928957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-02-19T08:13:08.928978Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-02-19T08:13:08.928996Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-02-19T08:13:08.929027Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2026-02-19T08:13:08.929048Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2026-02-19T08:13:08.929088Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2026-02-19T08:13:08.929127Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2026-02-19T08:13:08.929151Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2026-02-19T08:13:08.929189Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2026-02-19T08:13:08.929228Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2026-02-19T08:13:08.929251Z node 1 :SCHEME_BOARD_POPULATOR 
NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2026-02-19T08:13:08.929269Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2026-02-19T08:13:08.929356Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-02-19T08:13:08.929444Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-02-19T08:13:08.929532Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:08.929744Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-02-19T08:13:08.929808Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:08.929892Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:08.930026Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-02-19T08:13:08.930104Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-02-19T08:13:08.930156Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-02-19T08:13:08.930201Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:08.930252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:08.930325Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-02-19T08:13:08.930378Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: 
sender# [1:5:2052], cookie# 0 2026-02-19T08:13:08.930427Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:08.930504Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-02-19T08:13:08.930561Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-02-19T08:13:08.930693Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:08.930765Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2026-02-19T08:13:08.930801Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:08.930858Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-02-19T08:13:08.930897Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2026-02-19T08:13:08.930971Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:08.931034Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2026-02-19T08:13:08.931081Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:08.931137Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-02-19T08:13:08.931188Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2026-02-19T08:13:08.931260Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath 
PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:08.931318Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2026-02-19T08:13:08.931356Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:08.931412Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-02-19T08:13:08.931456Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:08.931526Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2026-02-19T08:13:08.931588Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-02-19T08:13:08.931635Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-02-19T08:13:08.931692Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:08.931762Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-02-19T08:13:08.931808Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-02-19T08:13:08.931838Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:08.931914Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-02-19T08:13:08.931954Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:08.931994Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-02-19T08:13:08.932015Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-02-19T08:13:08.932041Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:08.932095Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-02-19T08:13:08.932125Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:08.932243Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-02-19T08:13:08.932302Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:08.932366Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] 2026-02-19T08:13:08.932400Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2026-02-19T08:13:08.932424Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:08.932505Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-02-19T08:13:08.932549Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:08.932589Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2026-02-19T08:13:08.932614Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 2026-02-19T08:13:08.932646Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:08.932740Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-02-19T08:13:08.932795Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:08.932842Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2026-02-19T08:13:08.932863Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2026-02-19T08:13:08.932891Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2026-02-19T08:13:08.933085Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-02-19T08:13:08.946075Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-02-19T08:13:08.946169Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> TPopulatorTest::Boot [GOOD] >> TPopulatorTest::RemoveDir >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:07.838246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:07.838342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:07.838404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:07.838453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:07.838489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:07.838525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:07.838548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:07.838581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:07.838647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:07.838752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:07.839546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:07.839837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:07.929608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:07.929676Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:07.959981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:07.975394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:07.975601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:07.990957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:07.991822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:07.992547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:07.992901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:07.996959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:07.997140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:07.998321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:07.998390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:07.998491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:07.998563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:07.998605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:07.998797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:08.179308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.180680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.180832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.180937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:08.181783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
roposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:09.597309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:581: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-02-19T08:13:09.597491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:13:09.597640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:13:09.597731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:13:09.604579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:13:09.605038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:09.605078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:13:09.605277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:13:09.605488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:09.605534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-02-19T08:13:09.605572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-02-19T08:13:09.605862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:13:09.605930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:13:09.606030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:13:09.606066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:13:09.606098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:13:09.606126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:13:09.606159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 
2026-02-19T08:13:09.606204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:13:09.606266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:13:09.606297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:13:09.606436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:13:09.606485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-02-19T08:13:09.606517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-02-19T08:13:09.606549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-02-19T08:13:09.607963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:13:09.608045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:13:09.608079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:13:09.608118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:13:09.608170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:13:09.616312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:13:09.616425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:13:09.616485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:13:09.616518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:13:09.616576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 
2026-02-19T08:13:09.616651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-02-19T08:13:09.622676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:13:09.630226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:13:09.630499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:13:09.630541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:13:09.630986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:13:09.631074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:13:09.631112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:991:2910] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } TestModificationResults wait txId: 106 2026-02-19T08:13:09.634521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:13:09.634796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:13:09.634974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split and merge operations disabled, at schemeshard: 72057594046678944 2026-02-19T08:13:09.638615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split and merge operations 
disabled" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:09.638889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split and merge operations disabled, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-02-19T08:13:09.639211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-02-19T08:13:09.639261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-02-19T08:13:09.639610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:13:09.639695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:13:09.639756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:998:2917] TestWaitNotification: OK eventTxId 106 >> TPopulatorTestWithResets::UpdateAck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2026-02-19T08:13:09.809851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:09.809945Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 
281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 
State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 
FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> TPopulatorTest::RemoveDir [GOOD] >> TPopulatorTest::MakeDir >> TIcNodeCache::GetNodesInfoTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2026-02-19T08:12:53.238862Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486348402545576:2108];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:53.240407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:53.300031Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486345362494748:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:53.300080Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:53.389092Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:53.405185Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:53.405437Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:53.713872Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.724216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.756543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.756656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:53.759698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.759787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:53.786711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.815371Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:12:53.820558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.858545Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:53.950688Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:54.058175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:54.090363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/00508c/r3tmp/yandexRi3Dgp.tmp 2026-02-19T08:12:54.090389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/00508c/r3tmp/yandexRi3Dgp.tmp 2026-02-19T08:12:54.090536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/00508c/r3tmp/yandexRi3Dgp.tmp 2026-02-19T08:12:54.090612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:54.153838Z INFO: TTestServer started on Port 21348 GrpcPort 17007 2026-02-19T08:12:54.269215Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:54.301872Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:17007 2026-02-19T08:12:54.453041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:54.614303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.396127Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486362542364142:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.400031Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486362542364116:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.400128Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.401310Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486362542364168:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.401364Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.403811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:57.443759Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486362542364145:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-02-19T08:12:57.520841Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486362542364174:2183] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:57.760711Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608486365582416135:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:12:57.761477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.763364Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZjNjNzY0MTctZjllYTc0NDktNDFiNjU5NTQtNjRlOWNl, ActorId: [1:7608486365582416089:2336], ActorState: ExecuteState, LegacyTraceId: 01khtfb5845gvbpmte40jtkhx2, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:12:57.764685Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [2:7608486362542364181:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:12:57.766059Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=2&id=M2Q0OTM2MWItY2M0N2FjZTctYjZhZjk1NDgtNGMxNjI3Nzc=, ActorId: [2:7608486362542364113:2310], ActorState: ExecuteState, LegacyTraceId: 01khtfb55d2defm7sm9arw8rrf, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:12:57.766391Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:12:57.766120Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" iss ... 
nanos: 189000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1771488785 nanos: 120000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1771488785 nanos: 149000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1771488785 nanos: 5000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1771488785 nanos: 34000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } } } } 2026-02-19T08:13:07.658117Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-02-19T08:13:07.658220Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:500: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2026-02-19T08:13:07.662023Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:686: DescribeTopicImpl [1:7608486408532091189:2528]: Request location 2026-02-19T08:13:07.672805Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486408532091191:2529] connected; active server actors: 1 2026-02-19T08:13:07.672885Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2026-02-19T08:13:07.672899Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-02-19T08:13:07.672912Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2026-02-19T08:13:07.672925Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-02-19T08:13:07.672938Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2026-02-19T08:13:07.672949Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 2 2026-02-19T08:13:07.672962Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2026-02-19T08:13:07.672975Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2026-02-19T08:13:07.672985Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: 
[72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 2 2026-02-19T08:13:07.672998Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2026-02-19T08:13:07.673010Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2026-02-19T08:13:07.673021Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 2 2026-02-19T08:13:07.673031Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 2 2026-02-19T08:13:07.673042Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2026-02-19T08:13:07.673054Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 2 2026-02-19T08:13:07.674012Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:779: DescribeTopicImpl [1:7608486408532091189:2528]: Got location Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1771488784583 tx_id: 281474976715670 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 
2026-02-19T08:13:07.677833Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486408532091191:2529] disconnected. 2026-02-19T08:13:07.677913Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486408532091191:2529] disconnected; active server actors: 1 2026-02-19T08:13:07.677947Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486408532091191:2529] disconnected no session 2026-02-19T08:13:07.681885Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-02-19T08:13:07.682055Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:500: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1771488784583 tx_id: 281474976715670 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2026-02-19T08:13:07.690656Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-02-19T08:13:07.690771Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:500: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> TPopulatorQuorumTest::TwoRingGroups >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin >> TPopulatorQuorumTest::OneDisconnectedRingGroup >> TPopulatorQuorumTest::TwoRingGroups [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] >> TPopulatorTestWithResets::UpdateAck [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2026-02-19T08:13:10.661833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:10.661938Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 3: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 101 2026-02-19T08:13:11.203166Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 101 2026-02-19T08:13:11.203324Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:99:2126], cookie# 101 2026-02-19T08:13:11.203371Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 8 2026-02-19T08:13:11.203429Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 101 2026-02-19T08:13:11.203466Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 101 2026-02-19T08:13:11.203500Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 101 2026-02-19T08:13:11.203624Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:100:2127], cookie# 101 2026-02-19T08:13:11.204032Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:98:2125], cookie# 101 2026-02-19T08:13:11.204157Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: 
sender# [1:99:2126], cookie# 101 2026-02-19T08:13:11.204427Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2026-02-19T08:13:11.204717Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:100:2127], cookie# 101 2026-02-19T08:13:11.204758Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2124] Ack for unknown update (already acked?): sender# [1:100:2127], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-02-19T08:13:11.206775Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 447, preserialized size# 65 2026-02-19T08:13:11.206834Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 9 2026-02-19T08:13:11.207059Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000038, drop txId: 101" Path: "/Root/DirB" PathId: 38 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 
PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 307, preserialized size# 0 2026-02-19T08:13:11.207105Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 101, is deletion# true, version: 0 2026-02-19T08:13:11.207180Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 101 2026-02-19T08:13:11.207215Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 101 2026-02-19T08:13:11.207273Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:13:11.207422Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 101 2026-02-19T08:13:11.207452Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 101 2026-02-19T08:13:11.207480Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 101 2026-02-19T08:13:11.207666Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:12:2059], cookie# 101 2026-02-19T08:13:11.207726Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:15:2062], cookie# 101 2026-02-19T08:13:11.207760Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:18:2065], cookie# 101 2026-02-19T08:13:11.207863Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:98:2125], cookie# 101 2026-02-19T08:13:11.207911Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2026-02-19T08:13:11.207951Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# 
[1:15:2062], cookie# 101 2026-02-19T08:13:11.207984Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2026-02-19T08:13:11.208056Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:99:2126], cookie# 101 2026-02-19T08:13:11.208083Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 9 2026-02-19T08:13:11.208149Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:100:2127], cookie# 101 2026-02-19T08:13:11.208374Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:98:2125], cookie# 101 2026-02-19T08:13:11.208500Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:99:2126], cookie# 101 2026-02-19T08:13:11.208545Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 18446744073709551615 2026-02-19T08:13:11.208775Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:100:2127], cookie# 101 2026-02-19T08:13:11.208820Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2124] Ack for unknown update (already acked?): sender# [1:100:2127], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] Test command err: 2026-02-19T08:12:43.399294Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486305207491600:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:43.399348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:43.786308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:43.786416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:43.825846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:43.848736Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:43.854570Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486305207491573:2081] 1771488763392928 != 1771488763392931 2026-02-19T08:12:43.870366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:43.989054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:43.989078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:43.989085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:43.989198Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:44.107164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:44.425841Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:44.650508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:44.658748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:44.708518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:44.847711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.012857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.109567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.093323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486322387362634:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.093439Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.093817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486322387362644:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.093895Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.398043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.430289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.458176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.493406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.556258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.583874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.621660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.663960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.754987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486322387363523:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.755073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.755216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486322387363528:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.755264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486322387363530:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.755305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.758901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:47.770558Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486322387363532:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:47.831190Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486322387363583:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:48.399702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486305207491600:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:48.399812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:49.278965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... 02-19T08:12:59.540212Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:59.637947Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:59.801819Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:59.803436Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608486374443701066:2081] 1771488779499428 != 1771488779499431 2026-02-19T08:12:59.821112Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:59.825532Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:59.861426Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:59.885794Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:00.096067Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:00.096087Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:00.096094Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:00.096173Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:00.268019Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:00.546294Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-02-19T08:13:00.972515Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:00.993402Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:01.106834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:01.325184Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:01.458339Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:04.501979Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486374443701097:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:04.502046Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:04.856691Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486395918539425:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:04.856776Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:04.857099Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486395918539434:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:04.857138Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:04.969163Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.031480Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.101406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.169621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.232536Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.307497Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.383756Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.491393Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.633744Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486400213507598:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:05.633841Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:05.634169Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486400213507603:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:05.634211Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486400213507604:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:05.634299Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:05.639155Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:05.659312Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486400213507607:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:13:05.752875Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486400213507660:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:08.065070Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:08.578236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:08.578333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:08.579577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:08.579623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:08.579659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:08.579718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:08.579771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:08.579799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:08.579890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:08.579972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:08.580793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:08.581126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:08.823282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:08.823368Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:08.845963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:08.850068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:08.850307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:08.877366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:08.878099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:08.878820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:08.879143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:08.888065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:08.888264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:08.889452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:08.889525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:08.889635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:08.889677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:08.889732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:08.889920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:09.076369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.077363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.077487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.077584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.077655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:09.078880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
at schemeshard: 72057594046678944 2026-02-19T08:13:10.702576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:10.702670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:10.702721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:10.702852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:10.702917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:10.702976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:10.709869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:10.718444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:10.718540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:10.719164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:10.719250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:10.719315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:10.720995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1123:3030] sender: [1:1184:2058] recipient: [1:15:2062] 2026-02-19T08:13:10.826838Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:13:10.827108Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 312us result status StatusSuccess 2026-02-19T08:13:10.827775Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 
PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 109 2026-02-19T08:13:10.835104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2026-02-19T08:13:10.835379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2026-02-19T08:13:10.835533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2026-02-19T08:13:10.846863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:10.847170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2026-02-19T08:13:10.847638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2026-02-19T08:13:10.847684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2026-02-19T08:13:10.848151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-02-19T08:13:10.848255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-02-19T08:13:10.848291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:1189:3082] TestWaitNotification: OK eventTxId 109 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 111 2026-02-19T08:13:10.851223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 111 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:13:10.851469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 111:0, at schemeshard: 72057594046678944 2026-02-19T08:13:10.851678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 111:1, propose status:StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:10.857769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 111, response: Status: StatusInvalidParameter Reason: "Unable to change bounds of 
non-root partition: 1" TxId: 111 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:10.858080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 111, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 111, wait until txId: 111 TestWaitNotification wait txId: 111 2026-02-19T08:13:10.858539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 111: send EvNotifyTxCompletion 2026-02-19T08:13:10.858585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 111 2026-02-19T08:13:10.859091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2026-02-19T08:13:10.859203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2026-02-19T08:13:10.859243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:1196:3089] TestWaitNotification: OK eventTxId 111 >> TPopulatorQuorumTest::OneRingGroup >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::TwoRingGroups [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-02-19T08:13:11.814423Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-02-19T08:13:11.835337Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-02-19T08:13:11.835442Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-02-19T08:13:11.852008Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-02-19T08:13:11.852127Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-02-19T08:13:11.852180Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-02-19T08:13:11.852285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-02-19T08:13:11.852311Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-02-19T08:13:11.852340Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-02-19T08:13:11.852387Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-02-19T08:13:11.852415Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-02-19T08:13:11.852435Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-02-19T08:13:11.852472Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2026-02-19T08:13:11.852495Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2026-02-19T08:13:11.852532Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2026-02-19T08:13:11.852571Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2026-02-19T08:13:11.852595Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2026-02-19T08:13:11.852613Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2026-02-19T08:13:11.852667Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2026-02-19T08:13:11.852695Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2026-02-19T08:13:11.852714Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2026-02-19T08:13:11.852811Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-02-19T08:13:11.852926Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-02-19T08:13:11.853001Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 
0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.853179Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-02-19T08:13:11.853239Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:11.853319Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.853419Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-02-19T08:13:11.853483Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-02-19T08:13:11.853549Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-02-19T08:13:11.853602Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:11.853654Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.853747Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-02-19T08:13:11.853795Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-02-19T08:13:11.853840Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:11.853970Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-02-19T08:13:11.854030Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-02-19T08:13:11.854090Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.854200Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2026-02-19T08:13:11.854243Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:11.854298Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-02-19T08:13:11.854335Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2026-02-19T08:13:11.854381Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.854449Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2026-02-19T08:13:11.854498Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:11.854557Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-02-19T08:13:11.854591Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2026-02-19T08:13:11.854650Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.854730Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2026-02-19T08:13:11.854781Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:11.854836Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-02-19T08:13:11.854872Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.854926Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2026-02-19T08:13:11.854975Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-02-19T08:13:11.855012Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-02-19T08:13:11.855080Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.855144Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-02-19T08:13:11.855189Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-02-19T08:13:11.855230Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:11.855333Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-02-19T08:13:11.855370Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.855414Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-02-19T08:13:11.855434Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-02-19T08:13:11.855460Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:11.855509Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-02-19T08:13:11.855530Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:11.855621Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-02-19T08:13:11.855686Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.855738Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] 2026-02-19T08:13:11.855772Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2026-02-19T08:13:11.855795Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:11.855902Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-02-19T08:13:11.855945Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.855993Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2026-02-19T08:13:11.856026Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 2026-02-19T08:13:11.856056Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:11.856146Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-02-19T08:13:11.856206Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.856262Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2026-02-19T08:13:11.856285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2026-02-19T08:13:11.856312Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2026-02-19T08:13:11.856513Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-02-19T08:13:11.856560Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-02-19T08:13:11.856598Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 12345 2026-02-19T08:13:11.870264Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 12345 2026-02-19T08:13:11.870346Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> TPopulatorTest::MakeDir [GOOD] >> TPopulatorQuorumTest::OneRingGroup [GOOD] >> KqpVectorIndexes::BuildIndexTimesAndUser [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], 
[1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-02-19T08:13:11.943317Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-02-19T08:13:11.969240Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-02-19T08:13:11.969350Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... waiting for updates from replica populators 2026-02-19T08:13:11.975941Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-02-19T08:13:11.976032Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-02-19T08:13:11.976090Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-02-19T08:13:11.976180Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-02-19T08:13:11.976208Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-02-19T08:13:11.976254Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-02-19T08:13:11.976299Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-02-19T08:13:11.976321Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-02-19T08:13:11.976343Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-02-19T08:13:11.976460Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-02-19T08:13:11.976590Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.976777Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-02-19T08:13:11.976841Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 
2026-02-19T08:13:11.976936Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-02-19T08:13:11.977009Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-02-19T08:13:11.977110Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-02-19T08:13:11.977174Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.977259Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-02-19T08:13:11.977311Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:11.977397Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-02-19T08:13:11.977476Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-02-19T08:13:11.977557Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-02-19T08:13:11.977608Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-02-19T08:13:11.977677Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-02-19T08:13:11.982258Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.982353Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-02-19T08:13:11.982513Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 
72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-02-19T08:13:11.982551Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 2026-02-19T08:13:11.982612Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:11.982726Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-02-19T08:13:11.982796Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.982853Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-02-19T08:13:11.982887Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-02-19T08:13:11.982925Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-02-19T08:13:11.982969Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-02-19T08:13:11.983005Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-02-19T08:13:11.983043Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:11.983099Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-02-19T08:13:11.983128Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:31:2078], replica: [1:2199047594611:0] 2026-02-19T08:13:11.983296Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-02-19T08:13:11.994194Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-02-19T08:13:11.994298Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2026-02-19T08:13:11.256368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:11.256437Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:24339059:0] }: sender# [1:98:2125] 2026-02-19T08:13:11.920167Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920222Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:1099535966835:0] }: sender# [1:99:2126] 2026-02-19T08:13:11.920267Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 33] Replica: [1:2199047594611:0] }: sender# [1:100:2127] 2026-02-19T08:13:11.920312Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920377Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:100:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/resource_pools PathId: [OwnerId: 72057594046678944, LocalPathId: 33] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920434Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:24339059:0] }: sender# [1:98:2125] 2026-02-19T08:13:11.920466Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920519Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:1099535966835:0] }: sender# [1:99:2126] 2026-02-19T08:13:11.920553Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920605Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:2199047594611:0] }: sender# [1:100:2127] 2026-02-19T08:13:11.920652Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:100:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920712Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:24339059:0] }: sender# [1:98:2125] 2026-02-19T08:13:11.920751Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920808Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:1099535966835:0] }: sender# [1:99:2126] 2026-02-19T08:13:11.920844Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920900Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:2199047594611:0] }: sender# [1:100:2127] 2026-02-19T08:13:11.920931Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:100:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.920969Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:24339059:0] }: sender# [1:98:2125] 2026-02-19T08:13:11.921003Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: 
/Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.921050Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:1099535966835:0] }: sender# [1:99:2126] 2026-02-19T08:13:11.921077Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.921135Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:2199047594611:0] }: sender# [1:100:2127] 2026-02-19T08:13:11.921173Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:100:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.921225Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:24339059:0] }: sender# [1:98:2125] 2026-02-19T08:13:11.921256Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:97:2124] 2026-02-19T08:13:11.921308Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:1099535966835:0] }: sender# [1:99:2126] 2026-02-19T08:13:11.921336Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:2199047594611:0] }: sender# [1:100:2127] 2026-02-19T08:13:11.921374Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:97:2124] 2026-02-19T08:13:11.921416Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:100:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:97:2124] 2026-02-19T08:13:11.921464Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:24339059:0] }: sender# [1:98:2125] 2026-02-19T08:13:11.921500Z node 1 :SCHEME_BOARD_POPULATOR 
DEBUG: populator.cpp:275: [1:98:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:97:2124] 2026-02-19T08:13:11.921567Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:1099535966835:0] }: sender# [1:99:2126] 2026-02-19T08:13:11.921612Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:98:2125] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2026-02-19T08:13:11.921648Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:97:2124] 2026-02-19T08:13:11.925750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:2199047594611:0] }: sender# [1:100:2127] 2026-02-19T08:13:11.925957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:99:2126] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2026-02-19T08:13:11.926024Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:100:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:97:2124] 2026-02-19T08:13:11.926152Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:97:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:2199047594611:0] }: sender# [1:100:2127] 2026-02-19T08:13:11.926218Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:100:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:97:2124] 2026-02-19T08:13:11.926279Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:100:2127] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] TestWaitNotification: OK eventTxId 100 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:07.562238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:07.562325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:07.562389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:07.562431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:07.562470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:07.562513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:07.562548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:07.562609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:07.562688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:07.562768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:07.563825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:07.564140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:07.662425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:07.662485Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:07.674445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:07.675134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:07.675324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:07.686869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:07.687611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:07.688360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:07.688721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:07.692151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:07.692348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:07.693558Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:07.693645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:07.693777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:07.693823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:07.693864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:07.694083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:07.869872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.873322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.873521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.874949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.875018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:07.875097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:13:11.521027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:13:11.521053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:13:11.521198Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.521624Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.521775Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-02-19T08:13:11.521823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:13:11.521859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:13:11.521880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-02-19T08:13:11.521901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:13:11.522037Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.522115Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.522331Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-02-19T08:13:11.522542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:13:11.522877Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.522983Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.523371Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.523451Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, 
at schemeshard: 72057594046678944 2026-02-19T08:13:11.523834Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.524004Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.524056Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.524140Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.524310Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.524384Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.524874Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.525133Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.525208Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.525257Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.525393Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.525453Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.525513Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:11.531075Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:11.535594Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:11.535672Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:11.536357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:11.536421Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:11.536524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:11.537210Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1084:2979] sender: [2:1145:2058] recipient: [2:15:2062] 2026-02-19T08:13:11.598751Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:13:11.599057Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 338us result status StatusSuccess 2026-02-19T08:13:11.599828Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-02-19T08:13:12.796219Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:19:2066] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:10:2057] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-02-19T08:13:12.802797Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:19:2066] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:17:2064], cookie# 12345, event size# 36, preserialized size# 0 2026-02-19T08:13:12.802917Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:19:2066] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-02-19T08:13:12.804836Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:20:2067] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-02-19T08:13:12.804910Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:20:2067] Successful handshake: replica# [1:4:2051] 2026-02-19T08:13:12.804965Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:20:2067] Start full sync: replica# [1:4:2051] 2026-02-19T08:13:12.805049Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:21:2068] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-02-19T08:13:12.805073Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:21:2068] Successful handshake: replica# [1:5:2052] 2026-02-19T08:13:12.805113Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:21:2068] Start full sync: replica# [1:5:2052] 2026-02-19T08:13:12.805160Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:22:2069] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-02-19T08:13:12.805182Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:22:2069] Successful handshake: replica# [1:6:2053] 2026-02-19T08:13:12.805202Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:22:2069] Start full sync: replica# [1:6:2053] 2026-02-19T08:13:12.805310Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:20:2067] 2026-02-19T08:13:12.805409Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-02-19T08:13:12.805587Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:20:2067] 2026-02-19T08:13:12.805654Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-02-19T08:13:12.805850Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-02-19T08:13:12.805959Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2026-02-19T08:13:12.806048Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:21:2068] 2026-02-19T08:13:12.806115Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 
2026-02-19T08:13:12.806201Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2026-02-19T08:13:12.806247Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-02-19T08:13:12.806311Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-02-19T08:13:12.806383Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:22:2069] 2026-02-19T08:13:12.806456Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-02-19T08:13:12.806509Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-02-19T08:13:12.806589Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:20:2067] 2026-02-19T08:13:12.806650Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-02-19T08:13:12.806723Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-02-19T08:13:12.806844Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 0 2026-02-19T08:13:12.806879Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:20:2067], cookie# 0 2026-02-19T08:13:12.806922Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:20:2067] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:12.807013Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2026-02-19T08:13:12.807076Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-02-19T08:13:12.807173Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2026-02-19T08:13:12.807208Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:21:2068] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-02-19T08:13:12.807246Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-02-19T08:13:12.807325Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 0 2026-02-19T08:13:12.807354Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:21:2068], cookie# 0 2026-02-19T08:13:12.807383Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:22:2069] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-02-19T08:13:12.807451Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:22:2069], cookie# 0 2026-02-19T08:13:12.807471Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:22:2069], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:21:2068], replica: [1:1099535966835:0] populator: [1:22:2069], replica: [1:2199047594611:0] populator: [1:20:2067], replica: [1:24339059:0] 2026-02-19T08:13:12.807623Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 12345 2026-02-19T08:13:12.822020Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 12345 2026-02-19T08:13:12.822105Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:19:2066] Ack update: ack to# [1:17:2064], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2026-02-19T08:12:53.291431Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486349499133563:2248];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:53.291510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:53.352682Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:53.425905Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486348485309730:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:53.426021Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:53.524502Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:53.779442Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.808072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.854049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.854157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:53.855621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.855688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-02-19T08:12:53.868805Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:12:53.869035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.905995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:54.120238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:54.123032Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:54.126726Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:54.318301Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:54.401623Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:54.426772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/00508d/r3tmp/yandexYgp9KJ.tmp 2026-02-19T08:12:54.426800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/00508d/r3tmp/yandexYgp9KJ.tmp 2026-02-19T08:12:54.426979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/00508d/r3tmp/yandexYgp9KJ.tmp 2026-02-19T08:12:54.427080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:54.533843Z INFO: TTestServer started on Port 15003 GrpcPort 16910 PQClient connected to localhost:16910 2026-02-19T08:12:54.969593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:55.123986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.297992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486349499133563:2248];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:58.298057Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:58.406378Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486348485309730:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:58.406466Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:59.125582Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486374255113682:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.125667Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486374255113711:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.125881Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.128033Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486374255113717:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.128147Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.133581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:59.173961Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486374255113716:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-02-19T08:12:59.283104Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486374255113745:2184] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:59.664684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:59.672919Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [2:7608486374255113752:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:12:59.674583Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=2&id=OTJlZGI5YjItODVmYzI0YWQtYjkwNjcyOGYtYzg1YjI0Yzk=, ActorId: [2:7608486374255113675:2310], ActorState: ExecuteState, LegacyTraceId: 01khtfb6vc4j3drcmt0n546h0m, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:12:59.676800Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:12:59.680749Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608486375268938613:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:12:59.682636Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NzRiZWVlZTItNjc1YmJlNzItOWU4NDM3OS1jNTBlY2U2MQ==, ActorId: [1:7608486375268938587:2337], ActorState: ExecuteState, LegacyTraceId: 01khtfb6x096swnrkddzjv16xv, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:12:59.682973Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:12:59.805765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:00.012416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608486379563906364:3393] === CheckClustersList. 
Ok 2026-02-19T08:13:08.777942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:13:08.777978Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2026-02-19T08:13:12.911342Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:12.912705Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-02-19T08:13:12.912781Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-02-19T08:13:12.913103Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2026-02-19T08:13:12.913232Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-02-19T08:13:12.913396Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2026-02-19T08:13:12.200991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:12.201069Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... d.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:15:2062], cookie# 100 2026-02-19T08:13:12.743483Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:18:2065], cookie# 100 2026-02-19T08:13:12.743597Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:99:2126], cookie# 100 2026-02-19T08:13:12.743629Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2026-02-19T08:13:12.744075Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:100:2127], cookie# 100 2026-02-19T08:13:12.744224Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:98:2125], cookie# 100 2026-02-19T08:13:12.744619Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:99:2126], cookie# 100 2026-02-19T08:13:12.744656Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 2 2026-02-19T08:13:12.744942Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:100:2127], cookie# 100 2026-02-19T08:13:12.744974Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2124] Ack for unknown update (already acked?): sender# [1:100:2127], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at 
step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000037 FAKE_COORDINATOR: Erasing txId 100 2026-02-19T08:13:12.747277Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 555, preserialized size# 119 2026-02-19T08:13:12.747339Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 7 2026-02-19T08:13:12.747487Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 100 2026-02-19T08:13:12.747535Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 100 2026-02-19T08:13:12.747564Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 100 2026-02-19T08:13:12.748600Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:97:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 314, preserialized size# 2 2026-02-19T08:13:12.748670Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:97:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 100, is deletion# false, version: 3 2026-02-19T08:13:12.748795Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:12:2059], cookie# 100 2026-02-19T08:13:12.748846Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:15:2062], cookie# 100 2026-02-19T08:13:12.748881Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:18:2065], cookie# 100 2026-02-19T08:13:12.748980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 100 2026-02-19T08:13:12.749020Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 100 2026-02-19T08:13:12.749134Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:98:2125], cookie# 100 2026-02-19T08:13:12.749186Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:97:2124], cookie# 100 2026-02-19T08:13:12.749280Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:99:2126], cookie# 100 2026-02-19T08:13:12.749307Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 
72057594046678944, LocalPathId: 1], version# 7 2026-02-19T08:13:12.749368Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 100 2026-02-19T08:13:12.749421Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 100 2026-02-19T08:13:12.749465Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:100:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 100 2026-02-19T08:13:12.749702Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:100:2127], cookie# 100 2026-02-19T08:13:12.749827Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:98:2125], cookie# 100 2026-02-19T08:13:12.750103Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2126], cookie# 100 2026-02-19T08:13:12.750134Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:97:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2026-02-19T08:13:12.750176Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:97:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:100:2127], cookie# 100 2026-02-19T08:13:12.750198Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:97:2124] Ack for unknown update (already acked?): sender# [1:100:2127], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2026-02-19T08:12:54.518004Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486351964447016:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:54.518270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:54.564993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:54.597898Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct 
read cache: : Created 2026-02-19T08:12:54.706281Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:54.928191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:54.928539Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:54.928693Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:55.148191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:55.148291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:55.149495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:55.149577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:55.182891Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:12:55.183061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:55.214947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:55.402512Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:55.406018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:55.440011Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:55.526106Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:55.678259Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:55.805606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/005088/r3tmp/yandexH0dnh8.tmp 2026-02-19T08:12:55.805638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/005088/r3tmp/yandexH0dnh8.tmp 2026-02-19T08:12:55.805798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/005088/r3tmp/yandexH0dnh8.tmp 2026-02-19T08:12:55.805949Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:12:55.866088Z INFO: TTestServer started on Port 61083 GrpcPort 24407 PQClient connected to localhost:24407 2026-02-19T08:12:56.200398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:56.342173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:59.528338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486351964447016:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:59.528665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:59.530010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486373439284710:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.530187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.533836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486373439284751:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.533899Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486371642069189:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.534031Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.534273Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486371642069202:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.536400Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486371642069204:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.536459Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.534234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486373439284755:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.534294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.539138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:59.541790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486373439284786:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.541901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.543134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486373439284789:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.543208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:59.549053Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486371642069207:2179] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:12:59.576198Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486371642069205:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2026-02-19T08:12:59.575841Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486373439284756:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2026-02-19T08:12:59.639751Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486373439284842:3028] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:59.657175Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486371642069233:2185] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, ... 7892] has a tx writes info 2026-02-19T08:13:07.057169Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037892][Partition][3][StateInit] bootstrapping 3 [2:7608486406001808411:2383] 2026-02-19T08:13:07.069965Z node 1 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:10:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:13:07.070210Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72075186224037896][Partition][5][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [1:7608486403504057640:2496] 2026-02-19T08:13:07.075111Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72075186224037896][Partition][10][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [1:7608486403504057643:2496] 2026-02-19T08:13:07.081207Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037892][Partition][0][StateInit] bootstrapping 0 [2:7608486406001808412:2383] 2026-02-19T08:13:07.086345Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:399: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2026-02-19T08:13:07.086403Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:399: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2026-02-19T08:13:07.109345Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:07.109429Z node 2 :PERSQUEUE INFO: pq_impl.cpp:601: [PQ: 72075186224037894] has a tx writes info 2026-02-19T08:13:07.112968Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037894][Partition][8][StateInit] bootstrapping 8 [2:7608486406001808423:2386] 2026-02-19T08:13:07.115631Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037894][Partition][12][StateInit] bootstrapping 12 [2:7608486406001808424:2386] 2026-02-19T08:13:07.118663Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:07.118739Z node 2 :PERSQUEUE INFO: pq_impl.cpp:601: [PQ: 72075186224037897] has a tx writes info 2026-02-19T08:13:07.119487Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037897][Partition][7][StateInit] bootstrapping 7 [2:7608486406001808453:2385] 2026-02-19T08:13:07.127068Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7608486406001808455:2385] 2026-02-19T08:13:07.141686Z node 2 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-02-19T08:13:07.145891Z node 2 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:13:07.145992Z node 2 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:13:07.146341Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037895][Partition][9][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7608486406001808404:2384] 2026-02-19T08:13:07.146525Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7608486406001808405:2384] 2026-02-19T08:13:07.154684Z node 2 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:13:07.154768Z node 2 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:13:07.154966Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037894][Partition][8][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7608486406001808423:2386] 2026-02-19T08:13:07.155104Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037894][Partition][12][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7608486406001808424:2386] 2026-02-19T08:13:07.155621Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:399: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2026-02-19T08:13:07.155668Z node 2 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:13:07.155966Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037892][Partition][3][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7608486406001808411:2383] 2026-02-19T08:13:07.156059Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037892][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7608486406001808412:2383] 2026-02-19T08:13:07.159174Z node 2 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:13:07.159256Z node 2 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-02-19T08:13:07.159503Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7608486406001808453:2385] 2026-02-19T08:13:07.159600Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7608486406001808455:2385] 2026-02-19T08:13:07.160069Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:399: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2026-02-19T08:13:07.160088Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:399: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2026-02-19T08:13:07.161142Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:399: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2026-02-19T08:13:09.801862Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-02-19T08:13:09.801980Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1250: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2026-02-19T08:13:09.802080Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1260: TDescribePartitionActor[1:7608486416388959735:2518]: Bootstrap 2026-02-19T08:13:09.806277Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:686: DescribeTopicImpl [1:7608486416388959735:2518]: Request location 2026-02-19T08:13:09.812313Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486416388959737:2519] connected; active server actors: 1 2026-02-19T08:13:09.812990Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-02-19T08:13:09.813252Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:779: DescribeTopicImpl [1:7608486416388959735:2518]: Got location 2026-02-19T08:13:09.814173Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486416388959737:2519] disconnected. 
2026-02-19T08:13:09.814199Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486416388959737:2519] disconnected; active server actors: 1 2026-02-19T08:13:09.814213Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486416388959737:2519] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2026-02-19T08:13:09.816267Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-02-19T08:13:09.816382Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1250: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2026-02-19T08:13:09.816416Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1260: TDescribePartitionActor[1:7608486416388959738:2520]: Bootstrap 2026-02-19T08:13:09.817119Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:686: DescribeTopicImpl [1:7608486416388959738:2520]: Request location 2026-02-19T08:13:09.818137Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486416388959741:2522] connected; active server actors: 1 2026-02-19T08:13:09.818709Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-02-19T08:13:09.819858Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:779: DescribeTopicImpl [1:7608486416388959738:2520]: Got location Got response: 2026-02-19T08:13:09.821246Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486416388959741:2522] disconnected. 
2026-02-19T08:13:09.821278Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486416388959741:2522] disconnected; active server actors: 1 2026-02-19T08:13:09.821295Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486416388959741:2522] disconnected no session operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1771488787 nanos: 49000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2026-02-19T08:13:09.823390Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-02-19T08:13:09.823482Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1250: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2026-02-19T08:13:09.823511Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1260: TDescribePartitionActor[1:7608486416388959743:2523]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } 2026-02-19T08:13:10.041836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:13:10.041873Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] >> TVPatchTests::FindingPartsWhenError [GOOD] >> Cdc::StringEscaping [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2026-02-19T08:13:14.308213Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:14.309513Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2026-02-19T08:13:14.309584Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-02-19T08:13:14.309726Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |90.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2026-02-19T08:13:13.796147Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:13.797522Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-02-19T08:13:13.797615Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-02-19T08:13:13.797897Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-02-19T08:13:13.798099Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2026-02-19T08:13:13.798183Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet |90.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2026-02-19T08:13:13.785109Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:13.796502Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-02-19T08:13:13.796602Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-02-19T08:13:13.796732Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2026-02-19T08:13:14.095960Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:14.096636Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-02-19T08:13:14.096713Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-02-19T08:13:14.096911Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2026-02-19T08:13:14.096975Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-02-19T08:13:14.097061Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> TTopicApiDescribes::DescribeTopic [GOOD] |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> TVPatchTests::PatchPartPutError >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::BuildIndexTimesAndUser [GOOD] Test command err: 2026-02-19T08:12:52.934716Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486341380484952:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:52.935801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:53.393786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.406866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.406953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:53.510198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.519669Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:53.521026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486341380484891:2081] 1771488772897598 != 1771488772897601 2026-02-19T08:12:53.754403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:53.754442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:53.754462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:53.754525Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:53.785828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:53.946747Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:54.449132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:54.555159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.823487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:55.087558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:55.163286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.845943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486362855323257:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.846058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.849824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486362855323267:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.849910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.933608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486341380484952:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:57.933671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:58.279592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.319848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.356927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.415569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.459384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.520418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.570070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.650809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.782078Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486367150291441:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.782165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.782667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486367150291446:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.782706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486367150291447:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.782802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.787583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:58.805026Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486367150291450:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:58.891311Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486367150291503:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:01.034625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608486380035193736:3983], Recipient [1:7608486345675452543:2145]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:13:01.034660Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026- ... on 2026-02-19T08:13:04.458375Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:04.732723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:04.741922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:13:04.756873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:04.837013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.031465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:05.154667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:08.191412Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486413301679305:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:08.191535Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:08.191884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486413301679315:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:08.191924Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:08.313104Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:08.370915Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:08.427926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:08.472158Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486391826840982:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:08.472215Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:08.491906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:08.561616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:08.654886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:08.719431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:08.792572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:09.005396Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486417596647483:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:09.005491Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:09.005789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486417596647488:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:09.005831Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486417596647489:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:09.005941Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:09.010450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:09.036995Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486417596647492:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:13:09.100486Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486417596647543:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:11.224034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:11.519971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:11.703694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:11.819181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:11.896108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715764:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:13:11.987572Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2026-02-19T08:13:11.987604Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2026-02-19T08:13:11.987619Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout >> TVPatchTests::PatchPartFastXorDiffDisorder >> TVPatchTests::PatchPartPutError [GOOD] >> TVPatchTests::PatchPartGetError >> TVPatchTests::PatchPartGetError [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> 
TVPatchTests::FindingPartsWithTimeout [GOOD] >> TVPatchTests::PatchPartOk >> TVPatchTests::PatchPartOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2026-02-19T08:12:56.829945Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.007865s 2026-02-19T08:12:56.840229Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486358204052023:2271];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:56.840314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:56.910344Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:56.975952Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486358680757249:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:57.006031Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:57.061831Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:57.062087Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:57.473949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:57.476269Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:57.668100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:57.782937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:57.783045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:57.794464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:57.794542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:57.830534Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:12:57.830694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:57.883202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-02-19T08:12:57.984699Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:58.002240Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:58.033541Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:58.038315Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:58.053525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:58.190213Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007957s 2026-02-19T08:12:58.222010Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008361s 2026-02-19T08:12:58.439868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/005084/r3tmp/yandexHzZq3T.tmp 2026-02-19T08:12:58.439895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/005084/r3tmp/yandexHzZq3T.tmp 2026-02-19T08:12:58.440071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/005084/r3tmp/yandexHzZq3T.tmp 2026-02-19T08:12:58.440168Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:58.554737Z INFO: TTestServer started on Port 63751 GrpcPort 19531 PQClient connected to localhost:19531 2026-02-19T08:12:59.105165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:59.258253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:01.840375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486358204052023:2271];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:01.840462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:01.959942Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486358680757249:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:01.960035Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:03.177841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486388268824265:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.177977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486388268824286:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.178049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.183563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486388268824289:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.183635Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.191140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486388268824319:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.200332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.200948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486388268824322:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.201028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:03.204814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:03.249519Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486388268824288:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:13:03.306433Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486388268824368:3064] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:03.646287Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608486388268824379:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:13:03.647619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:03.648957Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NDQ1NzgzMGEtZDE1YmVjLTQ1NzM1YTIzLTljN2JlM2Fk, ActorId: [1:7608486388268824263:2339], ActorState: ExecuteState, LegacyTraceId: 01khtfbasp125v9n152rarwfz5, ReplyQueryCompileError, remove tx status# SCHEME_ERROR ... pping 2 [2:7608486423105267600:2380] 2026-02-19T08:13:11.574752Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7608486423105267604:2382] 2026-02-19T08:13:11.576536Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037892][Partition][0][StateInit] bootstrapping 0 [2:7608486423105267590:2379] 2026-02-19T08:13:11.571539Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72075186224037893][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7608486422628563964:2478] 2026-02-19T08:13:11.580786Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7608486423105267600:2380] 2026-02-19T08:13:11.579095Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037899][Partition][4][StateInit] bootstrapping 4 [1:7608486422628563969:2481] 2026-02-19T08:13:11.585451Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72075186224037899][Partition][4][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7608486422628563969:2481] 2026-02-19T08:13:11.582463Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037892][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7608486423105267590:2379] 2026-02-19T08:13:11.587356Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037894][Partition][12][StateInit] bootstrapping 12 [2:7608486423105267595:2383] 2026-02-19T08:13:11.589231Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037894][Partition][12][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7608486423105267595:2383] 2026-02-19T08:13:11.593322Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7608486423105267605:2382] 2026-02-19T08:13:11.587891Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037898][Partition][11][StateInit] bootstrapping 11 [1:7608486422628563959:2479] 2026-02-19T08:13:11.588585Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037893][Partition][6][StateInit] bootstrapping 6 [1:7608486422628563965:2478] 2026-02-19T08:13:11.590415Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72075186224037893][Partition][6][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7608486422628563965:2478] 2026-02-19T08:13:11.599124Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: 
[72075186224037896][Partition][5][StateInit] bootstrapping 5 [1:7608486422628563972:2480] 2026-02-19T08:13:11.601142Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72075186224037896][Partition][5][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7608486422628563972:2480] 2026-02-19T08:13:11.601886Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72075186224037898][Partition][11][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7608486422628563959:2479] 2026-02-19T08:13:11.602695Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037896][Partition][10][StateInit] bootstrapping 10 [1:7608486422628563974:2480] 2026-02-19T08:13:11.604404Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:11.604536Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72075186224037896][Partition][10][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7608486422628563974:2480] 2026-02-19T08:13:11.605002Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:11.603443Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:11.605003Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:11.608372Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:11.610279Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7608486423105267605:2382] 2026-02-19T08:13:11.619751Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:11.621266Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:13:11.619751Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig ===Query complete Create topic result: 1 2026-02-19T08:13:11.653609Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486422628564055:3977] connected; active server actors: 1 2026-02-19T08:13:11.653695Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 1 2026-02-19T08:13:11.653724Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2026-02-19T08:13:11.653735Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 1 2026-02-19T08:13:11.653749Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: 
[72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2026-02-19T08:13:11.653759Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 1 2026-02-19T08:13:11.653771Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2026-02-19T08:13:11.653783Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 1 2026-02-19T08:13:11.653794Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 1 2026-02-19T08:13:11.653804Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 1 2026-02-19T08:13:11.653813Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 1 2026-02-19T08:13:11.653822Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 1 2026-02-19T08:13:11.653841Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 1 2026-02-19T08:13:11.653854Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 1 2026-02-19T08:13:11.653864Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 1 2026-02-19T08:13:11.653874Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 1 2026-02-19T08:13:11.651528Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:686: DescribeTopicImpl [1:7608486422628564046:3974]: Request location 2026-02-19T08:13:11.662725Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486422628564055:3977] disconnected. 
2026-02-19T08:13:11.662764Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486422628564055:3977] disconnected; active server actors: 1 2026-02-19T08:13:11.662782Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486422628564055:3977] disconnected no session 2026-02-19T08:13:11.662815Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486422628564058:3980] connected; active server actors: 1 2026-02-19T08:13:11.659038Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:779: DescribeTopicImpl [1:7608486422628564046:3974]: Got location 2026-02-19T08:13:11.660099Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:686: DescribeTopicImpl [1:7608486422628564056:3978]: Request location 2026-02-19T08:13:11.669925Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2026-02-19T08:13:11.669955Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2026-02-19T08:13:11.669968Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2026-02-19T08:13:11.673977Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:779: DescribeTopicImpl [1:7608486422628564056:3978]: Got location 2026-02-19T08:13:11.677817Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486422628564058:3980] disconnected. 
2026-02-19T08:13:11.677869Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486422628564058:3980] disconnected; active server actors: 1 2026-02-19T08:13:11.677883Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486422628564058:3980] disconnected no session 2026-02-19T08:13:11.678228Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:686: DescribeTopicImpl [1:7608486422628564059:3981]: Request location 2026-02-19T08:13:12.653875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:13:12.653924Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2026-02-19T08:13:15.740331Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:15.741113Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-02-19T08:13:15.741175Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-02-19T08:13:15.741430Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2026-02-19T08:13:15.741542Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-02-19T08:13:15.741691Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2026-02-19T08:13:15.518180Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:15.518824Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] 
Status# OK ResultSize# 1 2026-02-19T08:13:15.518880Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-02-19T08:13:15.519061Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-02-19T08:13:15.519103Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:15.519325Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2026-02-19T08:13:15.519372Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2026-02-19T08:13:15.519443Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2026-02-19T08:13:15.519600Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2026-02-19T08:13:15.519638Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-02-19T08:13:15.519703Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2026-02-19T08:13:15.839328Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:15.840166Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-02-19T08:13:15.840224Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send 
NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-02-19T08:13:15.840450Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-02-19T08:13:15.840509Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:15.840775Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-02-19T08:13:15.840852Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2026-02-19T08:13:15.580861Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:15.582270Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-02-19T08:13:15.582346Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-02-19T08:13:15.582613Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2026-02-19T08:13:15.582727Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-02-19T08:13:15.582822Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2026-02-19T08:13:15.760141Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z 
Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2026-02-19T08:13:15.777843Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:735} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2026-02-19T08:13:15.777952Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-02-19T08:13:15.778082Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2026-02-19T08:13:16.071767Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:16.072487Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-02-19T08:13:16.072535Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-02-19T08:13:16.072733Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-02-19T08:13:16.072774Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-02-19T08:13:16.072943Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2026-02-19T08:13:16.073033Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2026-02-19T08:13:16.073123Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2026-02-19T08:13:16.073300Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2026-02-19T08:13:16.073338Z node 1 
:BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-02-19T08:13:16.073387Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb >> KqpVectorIndexes::EmptyVectorIndexUpdate-Partitioned+Overlap [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::Boot >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2026-02-19T08:12:56.352621Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486358483763237:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:56.352686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:56.528496Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:56.530085Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:12:57.102082Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:57.102209Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:57.145827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:57.231866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:57.231976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:57.243208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:57.243310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:57.286565Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:12:57.286779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:57.355734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:57.463655Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:57.549668Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:57.592210Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:57.610400Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:57.617819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:57.930431Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.027198s 2026-02-19T08:12:58.055003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/005087/r3tmp/yandexanioUK.tmp 2026-02-19T08:12:58.055029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/005087/r3tmp/yandexanioUK.tmp 2026-02-19T08:12:58.055215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/005087/r3tmp/yandexanioUK.tmp 2026-02-19T08:12:58.055316Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:58.223612Z INFO: TTestServer started on Port 19562 GrpcPort 17730 PQClient connected to localhost:17730 2026-02-19T08:12:58.667113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:58.729969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:12:58.824770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:01.353908Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486358483763237:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:01.353983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:02.192191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486384253568287:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:02.192425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:02.194408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486384253568323:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:02.194480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486384253568324:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:02.194804Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:02.202476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:02.239200Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486384253568328:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:13:02.319527Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486384253568418:3065] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:02.695824Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608486384253568428:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:13:02.698686Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YzlhOGY4Yi1kZjA1NjI2Ny0zZmYwM2ZmZS1iYzhlNDYzNQ==, ActorId: [1:7608486384253568283:2334], ActorState: ExecuteState, LegacyTraceId: 01khtfb9vc317nsd42kvp4vsqw, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:13:02.701077Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:13:02.700322Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [2:7608486385045284915:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:13:02.702081Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=2&id=YjY5OTY3NDAtNTg2ZmM0ODAtNmM1ZjdiZjYtZmFlZGZjYzA=, ActorId: [2:7608486385045284866:2310], ActorState: ExecuteState, LegacyTraceId: 01khtfb9wja367zjfj7f8d46cb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:13:02.702472Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: " ... ] disconnected no session Describe topic with location 2026-02-19T08:13:12.942455Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-02-19T08:13:12.942570Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1227: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2026-02-19T08:13:12.943210Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:686: DescribeTopicImpl [1:7608486427203243377:2544]: Request location 2026-02-19T08:13:12.946011Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486427203243379:2545] connected; active server actors: 1 2026-02-19T08:13:12.946081Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2026-02-19T08:13:12.946094Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-02-19T08:13:12.946107Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2026-02-19T08:13:12.946124Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-02-19T08:13:12.946144Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2026-02-19T08:13:12.946156Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 2 2026-02-19T08:13:12.946167Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition 
location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2026-02-19T08:13:12.946184Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2026-02-19T08:13:12.946194Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 2 2026-02-19T08:13:12.946205Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2026-02-19T08:13:12.946215Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2026-02-19T08:13:12.946224Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 2 2026-02-19T08:13:12.946235Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 2 2026-02-19T08:13:12.946247Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2026-02-19T08:13:12.946258Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:620: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 2 2026-02-19T08:13:12.947440Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:779: DescribeTopicImpl [1:7608486427203243377:2544]: Got location 2026-02-19T08:13:12.951340Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486427203243379:2545] disconnected. 
2026-02-19T08:13:12.951376Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486427203243379:2545] disconnected; active server actors: 1 2026-02-19T08:13:12.951392Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7608486427203243379:2545] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1771488791772 tx_id: 281474976710673 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "_max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } attributes { key: "_timestamp_type" value: "CreateTime" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe topic with no stats or location 2026-02-19T08:13:12.956061Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-02-19T08:13:12.956174Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1227: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1771488791772 tx_id: 281474976710673 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 
80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "_max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } attributes { key: "_timestamp_type" value: "CreateTime" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe bad topic 2026-02-19T08:13:12.965222Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-02-19T08:13:12.965341Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1227: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown |90.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |90.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TSchemeshardForcedCompactionTest::SchemeshardShouldCompact >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] |90.5%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::EmptyVectorIndexUpdate-Partitioned+Overlap [GOOD] Test command err: 2026-02-19T08:12:52.785392Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486344859852390:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:52.785509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:53.109749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.117652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.118626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:53.195599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.245819Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:53.249935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486344859852170:2081] 1771488772745044 != 1771488772745047 2026-02-19T08:12:53.463134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:53.481126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:53.481144Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:53.481165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:53.481250Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:53.766226Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:54.176140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:54.233676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.408604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.610945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.726672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.714763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486366334690534:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.714849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.715291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486366334690544:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.715326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.775712Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486344859852390:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:57.775794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:58.116100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.164321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.216432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.265432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.311286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.379347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.462390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.517147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.674953Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486370629658720:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.675058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.675403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486370629658725:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.675434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486370629658726:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.675561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.679522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:58.695592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:12:58.695843Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486370629658729:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:58.765430Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486370629658780:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:00.925831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608486379219593701:3984], Recipient [1:7608486344859852531:2149]: NKikimr::TE ... _TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:13:16.334446Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:13:16.335006Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486403427595634:2340], Recipient [2:7608486394837660086:2155]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037898 TableLocalId: 40 Generation: 1 Round: 0 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82712 } ShardState: 2 UserTablePartOwners: 72075186224037898 NodeId: 2 StartTime: 1771488786237 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:13:16.335036Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:13:16.335067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037898 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-02-19T08:13:16.335151Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037898 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 40] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:13:16.335173Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.099995s, queue# 1 2026-02-19T08:13:16.335338Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486403427595635:2341], Recipient [2:7608486394837660086:2155]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037899 TableLocalId: 40 Generation: 1 Round: 0 TableStats { DataSize: 0 RowCount: 0 
IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488793173 LastUpdateTime: 1771488793173 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82712 } ShardState: 2 UserTablePartOwners: 72075186224037899 NodeId: 2 StartTime: 1771488786237 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:13:16.335350Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:13:16.335365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037899 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-02-19T08:13:16.335442Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037899 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 40] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488793173 LastUpdateTime: 1771488793173 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:13:16.335589Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486403427595636:2342], Recipient [2:7608486394837660086:2155]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037900 TableLocalId: 40 Generation: 1 Round: 0 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488793163 LastUpdateTime: 1771488793163 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 8 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82712 } ShardState: 2 UserTablePartOwners: 72075186224037900 NodeId: 2 StartTime: 1771488786238 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:13:16.335609Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:13:16.335626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037900 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-02-19T08:13:16.335714Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037900 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 40] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 
LastAccessTime: 1771488793163 LastUpdateTime: 1771488793163 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 8 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:13:16.439842Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [2:7608486394837660086:2155]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:13:16.439888Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:13:16.439912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 3 2026-02-19T08:13:16.439977Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 3 2026-02-19T08:13:16.439997Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 3 2026-02-19T08:13:16.440061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046644480:11 data size 0 row count 0 2026-02-19T08:13:16.440099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037898 maps to shardIdx: 72057594046644480:11 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], pathId map=Logs, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:13:16.440111Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037898, followerId 0 2026-02-19T08:13:16.440175Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:11 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:13:16.440221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037898: SplitByLoadNotEnabledForTable 2026-02-19T08:13:16.440249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046644480:12 data size 0 row count 0 2026-02-19T08:13:16.440267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037899 maps to shardIdx: 72057594046644480:12 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], pathId map=Logs, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:13:16.440274Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037899, followerId 0 2026-02-19T08:13:16.440302Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:12 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:13:16.440314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037899: SplitByLoadNotEnabledForTable 2026-02-19T08:13:16.440330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046644480:13 data size 0 row count 0 2026-02-19T08:13:16.440349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037900 maps to shardIdx: 72057594046644480:13 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], pathId map=Logs, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:13:16.440357Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037900, followerId 0 2026-02-19T08:13:16.440383Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:13 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:13:16.440396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037900: SplitByLoadNotEnabledForTable 2026-02-19T08:13:16.440446Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:13:16.441275Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [2:7608486394837660086:2155]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:13:16.441296Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:13:16.441315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb >> TSchemeshardForcedCompactionTest::SchemeshardShouldCompactMultipleTimes >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> 
TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } |90.5%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:17.872320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:17.872439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:17.872525Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:17.872571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:17.872613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:17.872656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:17.872688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:17.872722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:17.872806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:17.872892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:17.878159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:17.878604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:18.091704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:18.091777Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:18.123569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:18.129884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:18.130102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:18.174424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:18.180378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:18.181223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:18.181596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:18.194936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:18.195175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:18.196504Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:18.196582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:18.196706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:18.196759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:18.196805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:18.196994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:18.407318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.408325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.408440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.408556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.408627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.408693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.408782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.408855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.408945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.409030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.409106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.409196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.409274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.409352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 110 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:13:19.831764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 110:0, at schemeshard: 72057594046678944 2026-02-19T08:13:19.832039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 110:1, propose status:StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", at schemeshard: 72057594046678944 2026-02-19T08:13:19.843784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 110, response: Status: StatusInvalidParameter Reason: "Last patrition 2 doesn\'t have the highest bound \"AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\"" TxId: 110 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:19.844162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 110, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2026-02-19T08:13:19.844554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2026-02-19T08:13:19.844607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2026-02-19T08:13:19.845102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2026-02-19T08:13:19.845202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2026-02-19T08:13:19.845245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:1010:2929] TestWaitNotification: OK eventTxId 110 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } TestModificationResults wait txId: 112 2026-02-19T08:13:19.848686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose 
Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:13:19.848937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 112:0, at schemeshard: 72057594046678944 2026-02-19T08:13:19.849162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 112:1, propose status:StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, at schemeshard: 72057594046678944 2026-02-19T08:13:19.852138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 112, response: Status: StatusInvalidParameter Reason: "Only 1 root partitions has new bounds, required: 3" TxId: 112 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:19.852468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 112, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-02-19T08:13:19.852869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-02-19T08:13:19.852959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-02-19T08:13:19.859372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-02-19T08:13:19.859583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-02-19T08:13:19.859644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1017:2936] TestWaitNotification: OK eventTxId 112 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } TestModificationResults wait txId: 114 2026-02-19T08:13:19.862910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } } } TxId: 114 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:13:19.863193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 114:0, at schemeshard: 72057594046678944 2026-02-19T08:13:19.863391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 114:1, propose 
status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2026-02-19T08:13:19.866476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 114, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 114 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:19.866768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 114, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 114, wait until txId: 114 TestWaitNotification wait txId: 114 2026-02-19T08:13:19.867189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 114: send EvNotifyTxCompletion 2026-02-19T08:13:19.867245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 114 2026-02-19T08:13:19.867699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 114, at schemeshard: 72057594046678944 2026-02-19T08:13:19.867800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-02-19T08:13:19.867849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:1024:2943] TestWaitNotification: OK eventTxId 114 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } TestModificationResults wait txId: 116 2026-02-19T08:13:19.871605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } } } TxId: 116 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:13:19.871910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 116:0, at schemeshard: 72057594046678944 
2026-02-19T08:13:19.872477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 116:1, propose status:StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", at schemeshard: 72057594046678944 2026-02-19T08:13:19.875225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 116, response: Status: StatusInvalidParameter Reason: "Partitions 0 and 0 have overlapped bounds at point \"-inf\"" TxId: 116 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:19.875528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 116, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 116, wait until txId: 116 TestWaitNotification wait txId: 116 2026-02-19T08:13:19.876521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 116: send EvNotifyTxCompletion 2026-02-19T08:13:19.876589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 116 2026-02-19T08:13:19.877093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2026-02-19T08:13:19.877239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-02-19T08:13:19.877287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:1031:2950] TestWaitNotification: OK eventTxId 116 >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |90.5%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 
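The StatusInvalidParameter reasons above (txIds 110, 112, 114 and 116) outline the constraints the TAlterPQ proposal appears to enforce on RootPartitionBoundaries: every root partition must be listed, each entry needs a KeyRange, the ranges must not overlap, and the last partition must carry the highest (open) upper bound. The sketch below is not the schemeshard implementation; it is a minimal standalone C++ illustration of those four checks, with hypothetical names (Boundary, ValidateRootPartitionBoundaries) and an approximate count check chosen for the example only.

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

// Hypothetical stand-in for one RootPartitionBoundaries entry: an optional
// FromBound/ToBound pair, where an absent bound means -inf / +inf.
struct Boundary {
    uint32_t Partition = 0;
    std::optional<std::string> FromBound;   // absent => "-inf"
    std::optional<std::string> ToBound;     // absent => "+inf"
};

// Returns an error string in the spirit of the reasons seen in the log,
// or std::nullopt if the set of boundaries looks consistent.
std::optional<std::string> ValidateRootPartitionBoundaries(
        std::vector<Boundary> bounds, size_t rootPartitionCount) {
    // 1. Every root partition must receive a new bound (approximation of
    //    "Only N root partitions has new bounds, required: M").
    if (bounds.size() < rootPartitionCount) {
        return "Only " + std::to_string(bounds.size()) +
               " root partitions have new bounds, required: " +
               std::to_string(rootPartitionCount);
    }
    // 2. A KeyRange must be present on each entry
    //    ("KeyRange must be specified for root partition bounds").
    for (const auto& b : bounds) {
        if (!b.FromBound && !b.ToBound) {
            return "KeyRange must be specified for root partition bounds";
        }
    }
    // 3. Sort by lower bound (-inf first) and reject overlapping ranges
    //    ("Partitions X and Y have overlapped bounds at point ...").
    std::sort(bounds.begin(), bounds.end(), [](const Boundary& a, const Boundary& b) {
        if (!a.FromBound) return static_cast<bool>(b.FromBound);
        if (!b.FromBound) return false;
        return *a.FromBound < *b.FromBound;
    });
    for (size_t i = 1; i < bounds.size(); ++i) {
        const auto& prev = bounds[i - 1];
        const auto& cur = bounds[i];
        const std::string curFrom = cur.FromBound ? *cur.FromBound : "-inf";
        if (!prev.ToBound || !cur.FromBound || *cur.FromBound < *prev.ToBound) {
            return "Partitions " + std::to_string(prev.Partition) + " and " +
                   std::to_string(cur.Partition) +
                   " have overlapped bounds at point \"" + curFrom + "\"";
        }
    }
    // 4. The last partition must own the highest bound, i.e. an open ToBound
    //    ("Last partition N doesn't have the highest bound ...").
    if (bounds.back().ToBound) {
        return "Last partition " + std::to_string(bounds.back().Partition) +
               " doesn't have the highest bound \"" + *bounds.back().ToBound + "\"";
    }
    return std::nullopt;
}

int main() {
    // Mirrors the shape of txId 116 above: partition 0 listed twice with the
    // same range, which trips the overlap check at "-inf".
    std::vector<Boundary> bad = {
        {0, std::nullopt, std::string("A")},
        {0, std::nullopt, std::string("A")},
        {1, std::string("A"), std::string("U")},
        {2, std::string("U"), std::nullopt},
    };
    if (auto err = ValidateRootPartitionBoundaries(bad, 3)) {
        std::cout << "StatusInvalidParameter: " << *err << "\n";
    }
    return 0;
}

Run against the txId 116 shape, the sketch reports an overlap at "-inf", matching the kind of reason recorded in the audit-log line above; the exact wording and the real validation order inside schemeshard__operation_alter_pq.cpp may differ.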
2026-02-19T08:13:18.566428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:18.566519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:18.566581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:18.566620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:18.566653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:18.566687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:18.566735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:18.566771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:18.566869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:18.566941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:18.567734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:18.568039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:18.792134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:18.792200Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:18.816584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:18.821885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:18.822097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:18.840879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:18.846006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:18.846723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-02-19T08:13:18.847069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:18.850356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:18.850527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:18.851651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:18.851713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:18.851821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:18.851864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:18.851902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:18.852071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:19.320070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.321912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.322091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.322231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.322308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.322379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.322611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.322705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.322847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.322978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.323045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.323136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.323223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.323308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... on for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-02-19T08:13:20.839249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:13:20.839272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-02-19T08:13:20.839297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:13:20.839321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:13:20.839346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:13:20.839529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.839624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.839791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-02-19T08:13:20.839873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:13:20.839918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:13:20.839947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-02-19T08:13:20.839975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:13:20.840105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.840197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.840468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-02-19T08:13:20.840673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:13:20.841062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.841238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.841723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.841839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.842201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.842411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.842486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.842602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.842809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.842893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.843438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.843751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.843844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.843913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.844106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.844160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.844214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.858981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:20.863761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-02-19T08:13:20.863839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:20.864120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:20.864171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:20.864224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:20.867269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1071:2973] sender: [1:1132:2058] recipient: [1:15:2062] 2026-02-19T08:13:20.954489Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:13:20.954801Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 613us result status StatusSuccess 2026-02-19T08:13:20.955559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { 
LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> 
TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:17.691868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:17.691985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:17.692072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:17.692131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:17.692176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:17.692218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:17.692251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:17.692280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:17.692380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:17.692469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:17.693442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:17.694288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:17.797588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:17.797688Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:17.832386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:17.838797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:17.839067Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:17.864892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:17.870359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:17.871361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:17.871842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:17.884669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:17.884941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:17.886537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:17.886631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:17.886769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:17.886828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:17.886886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:17.887092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:18.388476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.389550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.389668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:13:18.398038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.398951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:18.399037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 0 2026-02-19T08:13:21.179184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 0 2026-02-19T08:13:21.179210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2026-02-19T08:13:21.179235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2026-02-19T08:13:21.179260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2026-02-19T08:13:21.179314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2026-02-19T08:13:21.179343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-02-19T08:13:21.179368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-02-19T08:13:21.179394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-02-19T08:13:21.179418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-02-19T08:13:21.179446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-02-19T08:13:21.179471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-02-19T08:13:21.179496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-02-19T08:13:21.179520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-02-19T08:13:21.179546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-02-19T08:13:21.179574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-02-19T08:13:21.179601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-02-19T08:13:21.179626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-02-19T08:13:21.179652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-02-19T08:13:21.179677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-02-19T08:13:21.179702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-02-19T08:13:21.179742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-02-19T08:13:21.179770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-02-19T08:13:21.179794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-02-19T08:13:21.179818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-02-19T08:13:21.179844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-02-19T08:13:21.179869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-02-19T08:13:21.179896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-02-19T08:13:21.179922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:13:21.179946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, 
LocalPathId: 34] was 0 2026-02-19T08:13:21.179970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:13:21.179994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:13:21.180020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:13:21.180169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.180255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.180360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-02-19T08:13:21.180414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:13:21.180457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:13:21.180482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-02-19T08:13:21.180503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:13:21.180611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.180691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.180894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-02-19T08:13:21.181068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:13:21.181537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.181648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.182257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.182341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.182667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.182839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.182900Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.183090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.183314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.183406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.183972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.184281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.184367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.184418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.184544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.184609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.184670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.196539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:21.203211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:21.203293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:21.206336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:21.206418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:21.206487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:21.209263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:17.523827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:17.523919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:17.523980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:17.524035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:17.524079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:17.524114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:17.524145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:17.524182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:17.524260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:17.524341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:17.525245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:17.525600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:17.632915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:17.632982Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:17.659440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:17.660616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:17.660827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:17.683417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:17.684201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:17.684946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:17.685337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:17.690018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:17.690231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:17.691443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:17.691505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:17.691641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:17.691688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:17.691732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:17.691907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:17.852945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.854947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.855014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.855091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... on_pq.cpp:581: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-02-19T08:13:21.292112Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:13:21.292268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:13:21.292329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:13:21.294867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.295199Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:21.295233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:13:21.295372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:13:21.295553Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:21.295589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-02-19T08:13:21.295624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-02-19T08:13:21.296055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.296101Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:13:21.296206Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:13:21.296247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:13:21.296286Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:13:21.296316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:13:21.296365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-02-19T08:13:21.296412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:13:21.296453Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:13:21.296486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:13:21.296624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:13:21.296660Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-02-19T08:13:21.296692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-02-19T08:13:21.296718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-02-19T08:13:21.297626Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:13:21.297766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:13:21.297847Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:13:21.297887Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:13:21.297927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:13:21.298903Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:13:21.298977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:13:21.299004Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:13:21.299030Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:13:21.299061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:13:21.299125Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-02-19T08:13:21.301840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:13:21.302756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:13:21.302934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:13:21.302971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:13:21.303288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:13:21.303381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:13:21.303415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:984:2913] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } TestModificationResults wait txId: 105 2026-02-19T08:13:21.306513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: 
"\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:13:21.306813Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.307044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:21.309242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 1" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:13:21.309498Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-02-19T08:13:21.309835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-02-19T08:13:21.309881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-02-19T08:13:21.310265Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-02-19T08:13:21.310357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:13:21.310392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:991:2920] TestWaitNotification: OK eventTxId 105 >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:17.655839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:17.655921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:17.655979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:17.656013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:17.656046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:17.656091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:17.656119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:17.656158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:17.656221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:17.656314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:17.657446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:17.657747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:17.752785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:17.752840Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:17.771798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:17.772572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:17.772784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:17.784620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:17.785401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:17.786318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:17.786659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:17.790020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:17.790229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-02-19T08:13:17.791368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:17.791428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:17.791533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:17.791577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:17.791632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:17.791807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:17.953158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.955649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.955837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.955915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.955981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.956061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.956134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.956228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.956333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.956463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.956538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.956625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.956693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:17.957113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... an, step: 300, at tablet: 72057594046678944 2026-02-19T08:13:21.231740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:706: NPQState::TPropose operationId# 107:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-02-19T08:13:21.260764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2026-02-19T08:13:21.260978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2026-02-19T08:13:21.261261Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:576: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2026-02-19T08:13:21.261317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:228: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 107:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.261357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:581: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-02-19T08:13:21.261541Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 107:0 Propose -> Done 2026-02-19T08:13:21.269805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:13:21.276767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.277059Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:21.277372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:13:21.277649Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:21.277691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 107, path id: 39 2026-02-19T08:13:21.278027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.278083Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 107:0 ProgressState 2026-02-19T08:13:21.278189Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-02-19T08:13:21.278230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:13:21.278274Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-02-19T08:13:21.278311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:13:21.278366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2026-02-19T08:13:21.278398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:13:21.278450Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-02-19T08:13:21.278486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:0 2026-02-19T08:13:21.278603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:13:21.278642Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 1 2026-02-19T08:13:21.278675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-02-19T08:13:21.279419Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:13:21.279523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:13:21.279571Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2026-02-19T08:13:21.279611Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:13:21.279669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:13:21.279766Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 1 2026-02-19T08:13:21.279813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: 
TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:818:2783] 2026-02-19T08:13:21.282512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:13:21.282611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:13:21.282649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:1092:3006] TestWaitNotification: OK eventTxId 107 2026-02-19T08:13:21.283313Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:13:21.283553Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 264us result status StatusSuccess 2026-02-19T08:13:21.284307Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 
2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |90.5%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName |90.5%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |90.6%| [LD] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:18.702633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:18.702740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:18.702820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:18.702877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:18.702934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:18.702975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:18.703020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:18.703063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:18.703169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:18.703251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:18.704178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:18.704524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:18.968161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:18.968225Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:18.983806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:18.984494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:18.984785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:18.998517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:18.999370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:19.000251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:19.000622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:19.004539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:19.004850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:19.006127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:19.006210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:19.006348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:19.006404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:19.006452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:19.006679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:19.228262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.229359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.229521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.229616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.229687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.229875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.230005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.230091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.230246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.230334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.230401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.230472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.230547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.230653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
lPathId: 27] was 0 2026-02-19T08:13:22.723387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-02-19T08:13:22.723409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-02-19T08:13:22.723431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-02-19T08:13:22.723452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-02-19T08:13:22.723484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-02-19T08:13:22.723516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:13:22.723539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-02-19T08:13:22.723561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:13:22.723584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:13:22.723611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:13:22.723798Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.723911Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.724025Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-02-19T08:13:22.724068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:13:22.724104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:13:22.724127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-02-19T08:13:22.724152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:13:22.724279Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.724376Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.724604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, 
read records: 14, at schemeshard: 72057594046678944 2026-02-19T08:13:22.724794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:13:22.725174Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.725310Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.725771Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.725860Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.726209Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.726425Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.726483Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.726582Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.726766Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.726887Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.727410Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.727739Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.727830Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.727890Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.728047Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.728108Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.728171Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:13:22.740203Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:22.747838Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:22.747933Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:22.749000Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:22.749072Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:22.749132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:22.759614Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1136:3041] sender: [2:1197:2058] recipient: [2:15:2062] 2026-02-19T08:13:22.855035Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:13:22.855441Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 366us result status StatusSuccess 2026-02-19T08:13:22.856301Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicationTests::Create |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |90.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> KqpVectorIndexes::VectorResolveDuplicateEvent [GOOD] >> TReplicationTests::Disabled |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DropColumn >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> Cdc::RenameTable [GOOD] >> Cdc::ResolvedTimestamps |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::VectorResolveDuplicateEvent [GOOD] Test command err: 2026-02-19T08:12:52.580464Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486343702975038:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:52.581068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:52.890406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-02-19T08:12:52.890511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:52.949754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.001834Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486343702974982:2081] 1771488772567501 != 1771488772567504 2026-02-19T08:12:53.029276Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:53.030908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.194328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:53.194355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:53.194362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:53.194443Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:53.224310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:53.593831Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:53.782768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:53.794923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:53.868449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.072095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.332515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.415449Z node 
1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:56.515694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486360882846054:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.515800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.516089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486360882846064:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.516123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.156588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.211353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.294145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.347543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.426314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.499074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.564190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.578535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486343702975038:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:57.597061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:57.643448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.807692Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486365177814237:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.807817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.808320Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486365177814243:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.808370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486365177814244:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.808481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.812579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:57.835384Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486365177814247:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:57.934735Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486365177814298:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:00.120572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... pe: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:3346:4609], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 7898, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710766 2026-02-19T08:13:22.607437Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:13:22.607567Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:3346:4609], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 7898, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:22.607812Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976715675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976710757 } Internal: true } TxId: 281474976710766 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:13:22.609241Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxReply : TEvModifySchemeTransactionResult, id# 281474976715675, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710766 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:22.609384Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:3346:4609], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 7898, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710766 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:13:22.610262Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:13:22.610381Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:3346:4609], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 7898, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:22.742515Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2026-02-19T08:13:22.742804Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2026-02-19T08:13:22.743046Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2026-02-19T08:13:22.881193Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976715675, txId# 281474976710766 2026-02-19T08:13:22.881371Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:3346:4609], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 7898, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710766 2026-02-19T08:13:22.882079Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:13:22.882211Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:3346:4609], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 7898, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:22.882280Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:13:22.882736Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done 2026-02-19T08:13:22.882852Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: emb, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:3346:4609], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 26 UploadBytes: 464 ReadRows: 50 ReadBytes: 630 CpuTimeUs: 7898, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:13:22.882929Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715675, subscribers count# 1 2026-02-19T08:13:22.883267Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715675 2026-02-19T08:13:22.883593Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index1" index_columns: "emb" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 9 } EndTime { seconds: 13 } UserSID: "" } >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system >> TReplicationTests::Create [GOOD] >> TReplicationTests::CreateDropRecreate |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |90.6%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> TReplicationTests::Disabled [GOOD] >> TReplicationTests::CreateSequential >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge |90.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::ConsistencyLevel |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin >> TColumnShardTestReadWrite::RebootWriteRead >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::CommitInterval >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> Backup::ProposeBackup >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> TColumnShardTestReadWrite::WriteReadDuplicate >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> EvWrite::WriteWithSplit >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink >> TColumnShardTestReadWrite::WriteReadZSTD >> TColumnShardTestReadWrite::ReadWithProgramLike >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::SecureMode >> TReplicationTests::CommitInterval [GOOD] >> TReplicationTests::Alter >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool |90.6%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool 
|90.6%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::WriteReadNoCompression |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:33.313038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:33.313183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:33.313287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:33.313334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:33.313367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:33.313444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:33.313490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:33.313521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:33.313584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:33.313666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:33.314529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:33.314893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:33.443547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:33.443643Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:33.444533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:33.463849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:33.463981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:33.464194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:33.476961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:33.477778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:33.478605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:33.479040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:33.483834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:33.484018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:33.485982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:33.486054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:33.486322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:33.486367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:33.486406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:33.486705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.495440Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:33.664464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:33.664755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.665002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:33.665057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:33.665261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:33.665320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:33.669193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:33.669430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:33.669647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.669737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:33.669789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:33.669845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:33.675900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.676022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:33.676066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:33.677666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.677725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.677836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState 
leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:33.677887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:33.680947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:33.683609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:33.683906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:33.685019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:33.685174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:33.685245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:33.685597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:33.685657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:33.685862Z node 1 :FLAT_TX_SCH ... 
3, at schemeshard: 72057594046678944, txId: 253 2026-02-19T08:13:28.120535Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 180], version: 5 2026-02-19T08:13:28.120581Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 180] was 2 2026-02-19T08:13:28.121103Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-02-19T08:13:28.121196Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-02-19T08:13:28.121234Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 253 2026-02-19T08:13:28.121270Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 181], version: 2 2026-02-19T08:13:28.121310Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 181] was 2 2026-02-19T08:13:28.122292Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-02-19T08:13:28.122383Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-02-19T08:13:28.122420Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 253 2026-02-19T08:13:28.122455Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 178], version: 2 2026-02-19T08:13:28.122490Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 178] was 2 2026-02-19T08:13:28.122568Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 253, subscribers: 0 2026-02-19T08:13:28.126448Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-02-19T08:13:28.126622Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-02-19T08:13:28.126733Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-02-19T08:13:28.126987Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-02-19T08:13:28.128781Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 TestModificationResult got TxId: 253, wait until txId: 253 TestWaitNotification wait txId: 253 2026-02-19T08:13:28.131258Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 253: send EvNotifyTxCompletion 2026-02-19T08:13:28.131314Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 253 2026-02-19T08:13:28.133786Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 253, at schemeshard: 72057594046678944 2026-02-19T08:13:28.133916Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 253: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.133973Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 253: satisfy waiter [32:4243:6228] TestWaitNotification: OK eventTxId 253 TestWaitNotification wait txId: 245 2026-02-19T08:13:28.136118Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 245: send EvNotifyTxCompletion 2026-02-19T08:13:28.136171Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 245 TestWaitNotification wait txId: 246 2026-02-19T08:13:28.136272Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 246: send EvNotifyTxCompletion 2026-02-19T08:13:28.136302Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 246 TestWaitNotification wait txId: 247 2026-02-19T08:13:28.136368Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 247: send EvNotifyTxCompletion 2026-02-19T08:13:28.136397Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 247 TestWaitNotification wait txId: 248 2026-02-19T08:13:28.136461Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 248: send EvNotifyTxCompletion 2026-02-19T08:13:28.136492Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 248 TestWaitNotification wait txId: 249 2026-02-19T08:13:28.136558Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 249: send EvNotifyTxCompletion 2026-02-19T08:13:28.136588Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 249 TestWaitNotification wait txId: 250 2026-02-19T08:13:28.136652Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 250: send EvNotifyTxCompletion 2026-02-19T08:13:28.136682Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 250 TestWaitNotification wait txId: 251 2026-02-19T08:13:28.136749Z 
node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 251: send EvNotifyTxCompletion 2026-02-19T08:13:28.136779Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 251 TestWaitNotification wait txId: 252 2026-02-19T08:13:28.136841Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 252: send EvNotifyTxCompletion 2026-02-19T08:13:28.136873Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 252 2026-02-19T08:13:28.139923Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 245, at schemeshard: 72057594046678944 2026-02-19T08:13:28.140488Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 246, at schemeshard: 72057594046678944 2026-02-19T08:13:28.140579Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 245: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.140624Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 245: satisfy waiter [32:4246:6231] 2026-02-19T08:13:28.140982Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 247, at schemeshard: 72057594046678944 2026-02-19T08:13:28.141169Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 246: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.141209Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 246: satisfy waiter [32:4246:6231] 2026-02-19T08:13:28.141414Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 248, at schemeshard: 72057594046678944 2026-02-19T08:13:28.141543Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 249, at schemeshard: 72057594046678944 2026-02-19T08:13:28.141675Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 247: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.141724Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 247: satisfy waiter [32:4246:6231] 2026-02-19T08:13:28.141893Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 250, at schemeshard: 72057594046678944 2026-02-19T08:13:28.142058Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 251, at schemeshard: 72057594046678944 2026-02-19T08:13:28.142125Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 248: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.142159Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 248: satisfy waiter [32:4246:6231] 2026-02-19T08:13:28.142330Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 249: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.142368Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 249: satisfy waiter [32:4246:6231] 2026-02-19T08:13:28.142543Z node 32 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 252, at schemeshard: 72057594046678944 2026-02-19T08:13:28.142609Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 250: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.142642Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 250: satisfy waiter [32:4246:6231] 2026-02-19T08:13:28.142802Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 251: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.142837Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 251: satisfy waiter [32:4246:6231] 2026-02-19T08:13:28.143024Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 252: got EvNotifyTxCompletionResult 2026-02-19T08:13:28.143060Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 252: satisfy waiter [32:4246:6231] TestWaitNotification: OK eventTxId 245 TestWaitNotification: OK eventTxId 246 TestWaitNotification: OK eventTxId 247 TestWaitNotification: OK eventTxId 248 TestWaitNotification: OK eventTxId 249 TestWaitNotification: OK eventTxId 250 TestWaitNotification: OK eventTxId 251 TestWaitNotification: OK eventTxId 252 >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> TReplicationTests::SecureMode [GOOD] >> TReplicationTests::Describe >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> EvWrite::AbortInTransaction [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> KqpVectorIndexes::OrderByCosineLevel2+Nullable-UseSimilarity [GOOD] >> KqpVectorIndexes::OrderByCosineLevel2+Nullable+UseSimilarity ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2026-02-19T08:13:32.078683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:32.199767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:32.200050Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:32.224646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:32.224920Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:32.225175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:32.225312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:32.225429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:32.225550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:32.230007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:32.230206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:32.230352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:32.230479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:32.230612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:32.230723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:32.230884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:32.300231Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:32.300405Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:32.300458Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:32.300660Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:32.300848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:32.300956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:32.300999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:32.301091Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:32.301146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:32.301233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:32.301267Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:32.301457Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:32.301531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:32.301573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:32.301621Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:32.301739Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:32.301795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:32.301838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:32.301871Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:32.301928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2026-02-19T08:13:32.301995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:32.302039Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:32.302083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:32.302127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:32.302164Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:32.302397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:32.302449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:32.302486Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:32.302618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:32.302660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:32.302687Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:32.302732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:32.302776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:32.302804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:32.302876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:32.302914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:32.302943Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:32.303059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:32.303122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.883ms;WaitDuration:0ms;RawBytesRead:760], [StepName: PREPARE_RESULT; ExecutionDuration:0.557ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.13ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.054ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.345ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.991ms;WaitDuration:0ms;RawBytesRead:760]) ; 2026-02-19T08:13:33.450977Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.883ms;WaitDuration:0ms;RawBytesRead:760], [StepName: PREPARE_RESULT; ExecutionDuration:0.557ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.13ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.054ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.345ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.991ms;WaitDuration:0ms;RawBytesRead:760]) ; 2026-02-19T08:13:33.451134Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:13:33.451326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.883ms;WaitDuration:0ms;RawBytesRead:760], [StepName: PREPARE_RESULT; ExecutionDuration:0.557ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.13ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.054ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.345ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.991ms;WaitDuration:0ms;RawBytesRead:760]) ; 2026-02-19T08:13:33.451528Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.883ms;WaitDuration:0ms;RawBytesRead:760], [StepName: PREPARE_RESULT; ExecutionDuration:0.557ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.13ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.054ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.345ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.991ms;WaitDuration:0ms;RawBytesRead:760]) ; 2026-02-19T08:13:33.451832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:370:2382] finished for tablet 9437184 2026-02-19T08:13:33.452387Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:369:2381];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":2394940,"name":"_full_task","f":2394940,"d_finished":0,"c":0,"l":2404032,"d":9092},"events":[{"name":"bootstrap","f":2395169,"d_finished":1080,"c":1,"l":2396249,"d":1080},{"a":2403266,"name":"ack","f":2401524,"d_finished":1633,"c":1,"l":2403157,"d":2399},{"a":2403254,"name":"processing","f":2396372,"d_finished":3685,"c":3,"l":2403160,"d":4463},{"name":"ProduceResults","f":2395844,"d_finished":2773,"c":6,"l":2403704,"d":2773},{"a":2403708,"name":"Finish","f":2403708,"d_finished":0,"c":0,"l":2404032,"d":324},{"name":"task_result","f":2396385,"d_finished":2003,"c":2,"l":2401363,"d":2003}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.883ms;WaitDuration:0ms;RawBytesRead:760], [StepName: PREPARE_RESULT; ExecutionDuration:0.557ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.13ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.054ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.345ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.991ms;WaitDuration:0ms;RawBytesRead:760]) ; 2026-02-19T08:13:33.452455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:369:2381];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:13:33.452988Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:369:2381];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":2394940,"name":"_full_task","f":2394940,"d_finished":0,"c":0,"l":2404653,"d":9713},"events":[{"name":"bootstrap","f":2395169,"d_finished":1080,"c":1,"l":2396249,"d":1080},{"a":2403266,"name":"ack","f":2401524,"d_finished":1633,"c":1,"l":2403157,"d":3020},{"a":2403254,"name":"processing","f":2396372,"d_finished":3685,"c":3,"l":2403160,"d":5084},{"name":"ProduceResults","f":2395844,"d_finished":2773,"c":6,"l":2403704,"d":2773},{"a":2403708,"name":"Finish","f":2403708,"d_finished":0,"c":0,"l":2404653,"d":945},{"name":"task_result","f":2396385,"d_finished":2003,"c":2,"l":2401363,"d":2003}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.883ms;WaitDuration:0ms;RawBytesRead:760], [StepName: PREPARE_RESULT; ExecutionDuration:0.557ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.13ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.054ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.345ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.991ms;WaitDuration:0ms;RawBytesRead:760]) ; 2026-02-19T08:13:33.453083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:13:33.440928Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-02-19T08:13:33.453115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:13:33.453224Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2026-02-19T08:13:30.827201Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:30.860691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:30.860922Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:30.868870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:30.869109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:30.869345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:30.869492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:30.869634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:30.869782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:30.869965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:30.870138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:30.870281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:30.870408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.870538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:30.870638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:30.870770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:30.907064Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:30.907275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:30.907388Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:30.907572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.907734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:30.907817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:30.907859Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:30.907949Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:30.908033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:30.908082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:30.908111Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:30.908310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.908384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:30.908442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:30.908478Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:30.908572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:30.908647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:30.908701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:30.908737Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:30.908798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:30.908839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:30.908865Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:30.908913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:30.908949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:30.908973Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:30.909229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:30.909295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:30.909329Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:30.909486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:30.909538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.909594Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.909645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:30.909725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:30.909774Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:30.909829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:30.909881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:30.909916Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:30.910176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:30.910230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
3:33.991171Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1771488814181:max} readable: {1771488814181:max} at tablet 9437184 2026-02-19T08:13:33.991329Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-02-19T08:13:33.991575Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488814181:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2026-02-19T08:13:33.991663Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488814181:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2026-02-19T08:13:33.992503Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488814181:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2026-02-19T08:13:33.998960Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488814181:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-02-19T08:13:34.050955Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488814181:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:217;event=TTxScan started;actor_id=[2:182:2194];trace_detailed=; 2026-02-19T08:13:34.051843Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1,2;column_names=field,key;);; 2026-02-19T08:13:34.052057Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2026-02-19T08:13:34.052424Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:13:34.052608Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:13:34.052808Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:13:34.053063Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:13:34.053227Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:13:34.053481Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [2:182:2194] finished for tablet 9437184 2026-02-19T08:13:34.057059Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[2:181:2193];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":3927277,"name":"_full_task","f":3927277,"d_finished":0,"c":0,"l":3929970,"d":2693},"events":[{"name":"bootstrap","f":3927574,"d_finished":1509,"c":1,"l":3929083,"d":1509},{"a":3929214,"name":"ack","f":3929214,"d_finished":0,"c":0,"l":3929970,"d":756},{"a":3929191,"name":"processing","f":3929191,"d_finished":0,"c":0,"l":3929970,"d":779},{"name":"ProduceResults","f":3928686,"d_finished":811,"c":2,"l":3929685,"d":811},{"a":3929690,"name":"Finish","f":3929690,"d_finished":0,"c":0,"l":3929970,"d":280}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:13:34.057207Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[2:181:2193];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:13:34.057685Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[2:181:2193];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.006}],"full":{"a":3927277,"name":"_full_task","f":3927277,"d_finished":0,"c":0,"l":3933688,"d":6411},"events":[{"name":"bootstrap","f":3927574,"d_finished":1509,"c":1,"l":3929083,"d":1509},{"a":3929214,"name":"ack","f":3929214,"d_finished":0,"c":0,"l":3933688,"d":4474},{"a":3929191,"name":"processing","f":3929191,"d_finished":0,"c":0,"l":3933688,"d":4497},{"name":"ProduceResults","f":3928686,"d_finished":811,"c":2,"l":3929685,"d":811},{"a":3929690,"name":"Finish","f":3929690,"d_finished":0,"c":0,"l":3933688,"d":3998}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:13:34.057812Z node 2 :TX_COLUMNSHARD_SCAN 
DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:13:33.998904Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-02-19T08:13:34.057874Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:13:34.058017Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=200; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex >> THiveTest::TestFollowers >> THiveTest::TestDrain >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |90.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs >> KqpVectorIndexes::CoveredVectorIndexWithFollowers+StaleRO [GOOD] >> KqpVectorIndexes::CoveredVectorIndexWithFollowers-StaleRO >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> DqUnboxedValueToNativeArrowConversion::Struct [GOOD] >> DqUnboxedValueToNativeArrowConversion::Tuple >> 
TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> THeavyPerfTest::TTestLoadEverything >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> DqUnboxedValueToNativeArrowConversion::Tuple [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalListOfOptional [GOOD] >> DqUnboxedValueToNativeArrowConversion::VariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] >> Channels20::LocalChannelBackPressure >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> EvWrite::WriteWithSplit [GOOD] >> Normalizers::ChunksV0MetaNormalizer >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2026-02-19T08:13:30.656455Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:30.688906Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:30.689149Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:30.697731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:30.698017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:30.698268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:30.698389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:30.698508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:30.698623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:30.698730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:30.698830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:30.698934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:30.699049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.699203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:30.699309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:30.699409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:30.729850Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:30.730055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:30.730125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:30.730344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.730528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:30.730622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:30.730676Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:30.730791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:30.730859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:30.730910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:30.730950Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:30.731169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.731263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:30.731309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:30.731340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:30.731441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:30.731492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:30.731538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:30.731566Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:30.731629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:30.731669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:30.731728Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-02-19T08:13:30.731783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:30.731833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:30.731875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:30.732103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:30.732168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:30.732212Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:30.732375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:30.732443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.732475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.732523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:30.732563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:30.732597Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:30.732646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:30.732706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:30.732742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:30.735409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-02-19T08:13:30.735497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... s:(bytes=8388608;chunks=1);; 2026-02-19T08:13:38.006774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.483ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:2.142ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:1.942ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.938ms;WaitDuration:0ms;RawBytesRead:4064], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.225ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.142ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.255ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.201ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:1.106ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:9.469ms;WaitDuration:0ms;RawBytesRead:4064]) ; 2026-02-19T08:13:38.007105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.483ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:2.142ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; 
ExecutionDuration:1.942ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.938ms;WaitDuration:0ms;RawBytesRead:4064], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.225ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.142ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.255ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.201ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:1.106ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:9.469ms;WaitDuration:0ms;RawBytesRead:4064]) ; 2026-02-19T08:13:38.007641Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:1057:2924] finished for tablet 9437184 2026-02-19T08:13:38.008371Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:1056:2923];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.018}],"full":{"a":8267256,"name":"_full_task","f":8267256,"d_finished":0,"c":0,"l":8286220,"d":18964},"events":[{"name":"bootstrap","f":8267632,"d_finished":2290,"c":1,"l":8269922,"d":2290},{"a":8284887,"name":"ack","f":8282150,"d_finished":2515,"c":1,"l":8284665,"d":3848},{"a":8284873,"name":"processing","f":8270161,"d_finished":6019,"c":3,"l":8284669,"d":7366},{"name":"ProduceResults","f":8269257,"d_finished":4351,"c":6,"l":8285640,"d":4351},{"a":8285647,"name":"Finish","f":8285647,"d_finished":0,"c":0,"l":8286220,"d":573},{"name":"task_result","f":8270182,"d_finished":3441,"c":2,"l":8281879,"d":3441}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.483ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:2.142ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:1.942ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.938ms;WaitDuration:0ms;RawBytesRead:4064], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.225ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.142ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; 
ExecutionDuration:0.255ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.201ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:1.106ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:9.469ms;WaitDuration:0ms;RawBytesRead:4064]) ; 2026-02-19T08:13:38.008468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:1056:2923];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:13:38.009097Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:1056:2923];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":8267256,"name":"_full_task","f":8267256,"d_finished":0,"c":0,"l":8287019,"d":19763},"events":[{"name":"bootstrap","f":8267632,"d_finished":2290,"c":1,"l":8269922,"d":2290},{"a":8284887,"name":"ack","f":8282150,"d_finished":2515,"c":1,"l":8284665,"d":4647},{"a":8284873,"name":"processing","f":8270161,"d_finished":6019,"c":3,"l":8284669,"d":8165},{"name":"ProduceResults","f":8269257,"d_finished":4351,"c":6,"l":8285640,"d":4351},{"a":8285647,"name":"Finish","f":8285647,"d_finished":0,"c":0,"l":8287019,"d":1372},{"name":"task_result","f":8270182,"d_finished":3441,"c":2,"l":8281879,"d":3441}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.483ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:2.142ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:1.942ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.938ms;WaitDuration:0ms;RawBytesRead:4064], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.225ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.142ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.255ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.201ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; 
ExecutionDuration:1.106ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:9.469ms;WaitDuration:0ms;RawBytesRead:4064]) ; 2026-02-19T08:13:38.009227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:13:37.984964Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-02-19T08:13:38.009272Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:13:38.009539Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2026-02-19T08:13:31.821503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:31.901173Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:31.901420Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:31.922077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:31.922432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:31.922706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:31.922839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:31.922978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:31.923082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:31.923194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:31.923293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:31.923431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:31.923567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:31.923709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:31.923827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:31.923935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:32.012377Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:32.012559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:32.012631Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:32.012870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:32.013057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:32.013144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:32.013191Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:32.013290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:32.013373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:32.013431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:32.013476Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:32.013682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:32.013779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:32.013834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:32.013871Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:32.013970Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:32.014049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:32.014091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:32.014119Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:32.014186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:32.014266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:32.014303Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:32.014356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:32.014411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:32.014463Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:32.014707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:32.014771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:32.014801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:32.014976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:32.015024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:32.015056Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:32.015108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:32.015155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:32.015194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:32.015266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:32.015309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:32.015349Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:32.015500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:32.015557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.421ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:4.07ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:3.852ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.943ms;WaitDuration:0ms;RawBytesRead:2440], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.14ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.253ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.248ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:1.143ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:13.308ms;WaitDuration:0ms;RawBytesRead:2440]) ; 2026-02-19T08:13:39.960867Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.421ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:4.07ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:3.852ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.943ms;WaitDuration:0ms;RawBytesRead:2440], [StepName: 
INITIALIZE_SOURCE; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.14ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.253ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.248ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:1.143ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:13.308ms;WaitDuration:0ms;RawBytesRead:2440]) ; 2026-02-19T08:13:39.961407Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:1057:2924] finished for tablet 9437184 2026-02-19T08:13:39.962114Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:1056:2923];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.022},{"events":["l_ack","l_processing","l_Finish"],"t":0.023}],"full":{"a":9031029,"name":"_full_task","f":9031029,"d_finished":0,"c":0,"l":9054029,"d":23000},"events":[{"name":"bootstrap","f":9031337,"d_finished":1776,"c":1,"l":9033113,"d":1776},{"a":9052725,"name":"ack","f":9049926,"d_finished":2570,"c":1,"l":9052496,"d":3874},{"a":9052708,"name":"processing","f":9033301,"d_finished":8592,"c":3,"l":9052500,"d":9913},{"name":"ProduceResults","f":9032547,"d_finished":4255,"c":6,"l":9053446,"d":4255},{"a":9053453,"name":"Finish","f":9053453,"d_finished":0,"c":0,"l":9054029,"d":576},{"name":"task_result","f":9033317,"d_finished":5957,"c":2,"l":9049668,"d":5957}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.421ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:4.07ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:3.852ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.943ms;WaitDuration:0ms;RawBytesRead:2440], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.14ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.253ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; 
ExecutionDuration:0.248ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:1.143ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:13.308ms;WaitDuration:0ms;RawBytesRead:2440]) ; 2026-02-19T08:13:39.962209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:1056:2923];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:13:39.962841Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:1056:2923];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.022},{"events":["l_ack","l_processing","l_Finish"],"t":0.023}],"full":{"a":9031029,"name":"_full_task","f":9031029,"d_finished":0,"c":0,"l":9054798,"d":23769},"events":[{"name":"bootstrap","f":9031337,"d_finished":1776,"c":1,"l":9033113,"d":1776},{"a":9052725,"name":"ack","f":9049926,"d_finished":2570,"c":1,"l":9052496,"d":4643},{"a":9052708,"name":"processing","f":9033301,"d_finished":8592,"c":3,"l":9052500,"d":10682},{"name":"ProduceResults","f":9032547,"d_finished":4255,"c":6,"l":9053446,"d":4255},{"a":9053453,"name":"Finish","f":9053453,"d_finished":0,"c":0,"l":9054798,"d":1345},{"name":"task_result","f":9033317,"d_finished":5957,"c":2,"l":9049668,"d":5957}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.421ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:4.07ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:3.852ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.943ms;WaitDuration:0ms;RawBytesRead:2440], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.14ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.253ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.248ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; 
ExecutionDuration:1.143ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:13.308ms;WaitDuration:0ms;RawBytesRead:2440]) ; 2026-02-19T08:13:39.962983Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:13:39.935557Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4512;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4512;selected_rows=0; 2026-02-19T08:13:39.963030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:13:39.963300Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin >> ConvertUnboxedValueToArrowAndBack::Tuple >> ConvertUnboxedValueToArrowAndBack::Tuple [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverTupleWithOptionals [GOOD] >> DqOutputChannelTests::PartialRead [GOOD] >> DqOutputChannelTests::Overflow [GOOD] >> DqOutputChannelTests::BigRow >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> DqOutputChannelTests::BigRow [GOOD] >> DqOutputChannelTests::ChunkSizeLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2026-02-19T08:13:33.130218Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:33.163282Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:33.163491Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:33.171557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:33.171831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:33.172088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:33.172227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:33.172340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:33.172475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:33.172588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:33.172690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:33.172804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:33.172958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:33.173110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:33.173224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:33.173343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:33.210844Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:33.211004Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:33.211055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:33.211254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:33.211406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:33.211487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:33.211528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:33.211624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:33.211688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:33.211777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:33.211829Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:33.212039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:33.212106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:33.212163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:33.212199Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:33.212285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:33.212333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:33.212369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:33.212396Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:33.212459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:33.212493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-02-19T08:13:33.212535Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:33.212582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:33.212626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:33.212659Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:33.212865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:33.212929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:33.212961Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:33.213096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:33.213139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:33.213167Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:33.213210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:33.213250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:33.213279Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:33.213319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:33.213353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:33.213381Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-02-19T08:13:33.213495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:33.213556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 40.815549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.403ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.018ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:2.12ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:1.913ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.244ms;WaitDuration:0ms;RawBytesRead:4720], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.192ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.145ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.253ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.242ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:0.456ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:7.995ms;WaitDuration:0ms;RawBytesRead:4720]) ; 2026-02-19T08:13:40.815892Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.403ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; 
ExecutionDuration:0.018ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:2.12ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:1.913ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.244ms;WaitDuration:0ms;RawBytesRead:4720], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.192ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.145ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.253ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.242ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:0.456ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:7.995ms;WaitDuration:0ms;RawBytesRead:4720]) ; 2026-02-19T08:13:40.816448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:1057:2924] finished for tablet 9437184 2026-02-19T08:13:40.817167Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:1056:2923];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":8490160,"name":"_full_task","f":8490160,"d_finished":0,"c":0,"l":8507125,"d":16965},"events":[{"name":"bootstrap","f":8490446,"d_finished":1929,"c":1,"l":8492375,"d":1929},{"a":8505756,"name":"ack","f":8502868,"d_finished":2654,"c":1,"l":8505522,"d":4023},{"a":8505737,"name":"processing","f":8492573,"d_finished":5970,"c":3,"l":8505527,"d":7358},{"name":"ProduceResults","f":8491733,"d_finished":4407,"c":6,"l":8506537,"d":4407},{"a":8506545,"name":"Finish","f":8506545,"d_finished":0,"c":0,"l":8507125,"d":580},{"name":"task_result","f":8492596,"d_finished":3243,"c":2,"l":8502605,"d":3243}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.403ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.018ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:2.12ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:1.913ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.244ms;WaitDuration:0ms;RawBytesRead:4720], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.192ms;WaitDuration:0ms;RawBytesRead:0], [StepName: 
DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.145ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.253ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.242ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:0.456ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:7.995ms;WaitDuration:0ms;RawBytesRead:4720]) ; 2026-02-19T08:13:40.817256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:1056:2923];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:13:40.818028Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:1056:2923];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":8490160,"name":"_full_task","f":8490160,"d_finished":0,"c":0,"l":8507907,"d":17747},"events":[{"name":"bootstrap","f":8490446,"d_finished":1929,"c":1,"l":8492375,"d":1929},{"a":8505756,"name":"ack","f":8502868,"d_finished":2654,"c":1,"l":8505522,"d":4805},{"a":8505737,"name":"processing","f":8492573,"d_finished":5970,"c":3,"l":8505527,"d":8140},{"name":"ProduceResults","f":8491733,"d_finished":4407,"c":6,"l":8506537,"d":4407},{"a":8506545,"name":"Finish","f":8506545,"d_finished":0,"c":0,"l":8507907,"d":1362},{"name":"task_result","f":8492596,"d_finished":3243,"c":2,"l":8502605,"d":3243}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: ALLOCATE_MEMORY::FILTER; ExecutionDuration:0.403ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.018ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREPARE_RESULT; ExecutionDuration:2.12ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:1.913ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.244ms;WaitDuration:0ms;RawBytesRead:4720], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.192ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; 
ExecutionDuration:0.145ms;WaitDuration:0ms;RawBytesRead:0], [StepName: FETCHING_COLUMNS; ExecutionDuration:0.253ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PREDICATE; ExecutionDuration:0.242ms;WaitDuration:0ms;RawBytesRead:0], [StepName: ASSEMBLER::PREDICATE; ExecutionDuration:0.456ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:7.995ms;WaitDuration:0ms;RawBytesRead:4720]) ; 2026-02-19T08:13:40.818192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:13:40.796252Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-02-19T08:13:40.818255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:13:40.818558Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TReplicationTests::CopyReplicatedTable [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink >> DqOutputWideChannelTests::SingleRead [GOOD] >> DqOutputWideChannelTests::PartialRead [GOOD] >> DqOutputWideChannelTests::PopAll [GOOD] >> DqOutputWideChannelWithStorageTests::Spill [GOOD] >> DqOutputWideChannelWithStorageTests::Overflow [GOOD] >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputChannelTests::ChunkSizeLimit [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] >> THiveTest::TestIdempotentDrain >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval [GOOD] >> DqUnboxedValueDoNotFitToArrow::OptionalOfOptional [GOOD] >> DqUnboxedValueDoNotFitToArrow::LargeVariant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:25.528838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2026-02-19T08:13:25.528948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:25.529012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:25.529053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:25.529088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:25.529126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:25.529160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:25.529208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:25.529282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:25.529447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:25.530341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:25.530694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:25.626852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:25.626912Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:25.642803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:25.643601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:25.643800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:25.657287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:25.658043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:25.658787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:25.659153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:25.663201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:25.663386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:25.664670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:25.664736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:25.664910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:25.664962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:25.665008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:25.665189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:25.835972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.837154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.837283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.837420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.837601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.837675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.837754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.837834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.837935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.838012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.838064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.838110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.838181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.838279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... SHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:13:44.007420Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:13:44.007480Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:13:44.007541Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:13:44.007601Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:13:44.007661Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:13:44.007832Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:13:44.007895Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:13:44.007948Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:13:44.007998Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:13:44.008839Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274137603, Sender [10:212:2212], Recipient [10:129:2154]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 10 } 2026-02-19T08:13:44.008887Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5383: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-02-19T08:13:44.008961Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:13:44.009048Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:13:44.009090Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:13:44.009153Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:13:44.009219Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:13:44.009332Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:13:44.010304Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274137603, Sender [10:212:2212], Recipient [10:129:2154]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 } 2026-02-19T08:13:44.010355Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5383: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-02-19T08:13:44.010430Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:13:44.010525Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:13:44.010563Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:13:44.010596Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:13:44.010631Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:13:44.010735Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:13:44.010814Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:13:44.011467Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435084, Sender [10:129:2154], Recipient [10:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:13:44.011518Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5494: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:13:44.011598Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:13:44.011657Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath 
for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:13:44.011764Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:13:44.013542Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:13:44.015324Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:13:44.015369Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:13:44.015676Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:13:44.015709Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:13:44.016805Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:13:44.017095Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:13:44.017156Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:13:44.017652Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [10:853:2817], Recipient [10:129:2154]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:13:44.017736Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:13:44.017793Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046678944 2026-02-19T08:13:44.017920Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [10:800:2764], Recipient [10:129:2154]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2026-02-19T08:13:44.017961Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:13:44.018069Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:13:44.018195Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:13:44.018257Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:851:2815] 2026-02-19T08:13:44.018508Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [10:853:2817], Recipient [10:129:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:13:44.018552Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event 
TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:13:44.018600Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-02-19T08:13:44.019041Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [10:854:2818], Recipient [10:129:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-02-19T08:13:44.019117Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:13:44.019256Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:13:44.019493Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 251us result status StatusPathDoesNotExist 2026-02-19T08:13:44.019700Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:25.298539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:25.298629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:25.298695Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:25.298729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:25.298764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:25.298819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:25.298849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:25.298897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:25.298983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:25.299048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:25.299925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:25.300228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:25.393214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:25.393281Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:25.407798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:25.408644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:25.408850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:25.424602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:25.425453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:25.426245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:25.426579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:25.430542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:25.430723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:25.431986Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:25.432046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:25.432151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:25.432195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:25.432234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:25.432417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:25.588660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.589539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.589650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.589750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.589821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.589876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.589929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.590021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.590101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.590155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.590209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.590263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.590339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:25.590406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 823 RawX2: 42949675749 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:13:44.557549Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-02-19T08:13:44.557867Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 823 RawX2: 42949675749 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:13:44.557973Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:13:44.558161Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 823 RawX2: 42949675749 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:13:44.558305Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:44.558385Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1094: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-02-19T08:13:44.582358Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:13:44.583232Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:13:44.607334Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 42949675665 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:13:44.607417Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:13:44.607572Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 42949675665 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:13:44.607632Z node 10 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:13:44.607727Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 42949675665 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:13:44.607804Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:44.607864Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:13:44.607933Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:13:44.607988Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:13:44.608049Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:13:44.613349Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:13:44.613995Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:13:44.614099Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-02-19T08:13:44.614187Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-02-19T08:13:44.614261Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-02-19T08:13:44.614386Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-02-19T08:13:44.614488Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Done -> Done 2026-02-19T08:13:44.622910Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:13:44.623005Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:13:44.623206Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 
2026-02-19T08:13:44.623265Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:13:44.623323Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:13:44.623373Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:13:44.623443Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:13:44.623543Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [10:747:2724] message: TxId: 102 2026-02-19T08:13:44.623643Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:13:44.623710Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:13:44.623762Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:13:44.623980Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:13:44.624038Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:13:44.628153Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:13:44.628247Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:853:2811] TestWaitNotification: OK eventTxId 102 2026-02-19T08:13:44.629389Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:13:44.629875Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 598us result status StatusSuccess 2026-02-19T08:13:44.630551Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: 
false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Normalizers::ChunksV0MetaNormalizer [GOOD] >> Channels20::IcChannelLateBinding |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] >> KqpVectorIndexes::OrderByCosineOnlyVectorCovered-Nullable-Overlap [GOOD] >> KqpVectorIndexes::OrderByCosineOnlyVectorCovered-Nullable+Overlap >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TScaleRecommenderTest::BasicTest >> KqpScheme::CreateAndAlterTableWithPartitionBy >> Cdc::DropIndex [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> KqpVectorIndexes::OrderByCosineLevel2WithCover+Nullable [GOOD] >> KqpVectorIndexes::OrderByCosineLevel2WithCover-Nullable >> KqpScheme::QueryWithAlter >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> KqpScheme::InvalidationAfterDropCreate >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink >> 
KqpScheme::CreateTableWithWrongPartitionAtKeys >> Channels20::LocalChannelBackPressure [GOOD] >> Channels20::LocalChannelAsyncRead >> THiveTest::TestIdempotentDrain [GOOD] >> THiveTest::TestLocalDisconnect >> TScaleRecommenderTest::BasicTest [GOOD] >> TScaleRecommenderTest::RollingRestart >> KqpOlapScheme::DropTable >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestHiveBalancer >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ChunksV0MetaNormalizer [GOOD] Test command err: 2026-02-19T08:13:31.167325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:31.205659Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:31.206079Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:31.214569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:31.214868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:31.215120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:31.215242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:31.215355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:31.215480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:31.215619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:31.215735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:31.215839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:31.215963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:31.216090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:31.216238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:31.216350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:31.251168Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:31.251362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:31.251427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:31.251632Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:31.251804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:31.251907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:31.251955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:31.252051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:31.252125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:31.252174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:31.252207Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:31.252420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:31.252491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:31.252535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:31.252574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:31.252675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:31.252749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:31.252801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:31.252831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:31.252896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:31.252943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:31.252999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:31.253062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:31.253114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:31.253155Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:31.253393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:31.253463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:31.253502Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:31.253666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:31.254504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:31.254583Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:31.254662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:31.254713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:31.254751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:31.254821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:31.254867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:31.254912Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:31.255142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:31.255191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
SULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:32.087ms;WaitDuration:0ms;RawBytesRead:2488696], [StepName: PREPARE_RESULT; ExecutionDuration:13.146ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.132ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.433ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:69.306ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:115.134ms;WaitDuration:0ms;RawBytesRead:2488696]) ; 2026-02-19T08:13:46.035902Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:32.087ms;WaitDuration:0ms;RawBytesRead:2488696], [StepName: PREPARE_RESULT; ExecutionDuration:13.146ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.132ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.433ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:69.306ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:115.134ms;WaitDuration:0ms;RawBytesRead:2488696]) ; 2026-02-19T08:13:46.036056Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:13:46.036224Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:32.087ms;WaitDuration:0ms;RawBytesRead:2488696], [StepName: PREPARE_RESULT; ExecutionDuration:13.146ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; 
ExecutionDuration:0.132ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.433ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:69.306ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:115.134ms;WaitDuration:0ms;RawBytesRead:2488696]) ; 2026-02-19T08:13:46.036424Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:32.087ms;WaitDuration:0ms;RawBytesRead:2488696], [StepName: PREPARE_RESULT; ExecutionDuration:13.146ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.132ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.433ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:69.306ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:115.134ms;WaitDuration:0ms;RawBytesRead:2488696]) ; 2026-02-19T08:13:46.036856Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [2:320:2320] finished for tablet 9437184 2026-02-19T08:13:46.037457Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[2:318:2319];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.009},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.01},{"events":["f_ack"],"t":0.09},{"events":["l_task_result"],"t":1.452},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.455}],"full":{"a":14078900,"name":"_full_task","f":14078900,"d_finished":0,"c":0,"l":15534553,"d":1455653},"events":[{"name":"bootstrap","f":14079211,"d_finished":10034,"c":1,"l":14089245,"d":10034},{"a":15533670,"name":"ack","f":14169572,"d_finished":610917,"c":421,"l":15533573,"d":611800},{"a":15533659,"name":"processing","f":14089502,"d_finished":1274019,"c":843,"l":15533576,"d":1274913},{"name":"ProduceResults","f":14088677,"d_finished":1122421,"c":1266,"l":15534084,"d":1122421},{"a":15534098,"name":"Finish","f":15534098,"d_finished":0,"c":0,"l":15534553,"d":455},{"name":"task_result","f":14089522,"d_finished":648068,"c":422,"l":15531547,"d":648068}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:32.087ms;WaitDuration:0ms;RawBytesRead:2488696], [StepName: PREPARE_RESULT; ExecutionDuration:13.146ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.132ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.433ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:69.306ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:115.134ms;WaitDuration:0ms;RawBytesRead:2488696]) ; 2026-02-19T08:13:46.037547Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[2:318:2319];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:13:46.038475Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[2:318:2319];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.009},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.01},{"events":["f_ack"],"t":0.09},{"events":["l_task_result"],"t":1.452},{"events":["l_ProduceResults","f_Finish"],"t":1.455},{"events":["l_ack","l_processing","l_Finish"],"t":1.456}],"full":{"a":14078900,"name":"_full_task","f":14078900,"d_finished":0,"c":0,"l":15535223,"d":1456323},"events":[{"name":"bootstrap","f":14079211,"d_finished":10034,"c":1,"l":14089245,"d":10034},{"a":15533670,"name":"ack","f":14169572,"d_finished":610917,"c":421,"l":15533573,"d":612470},{"a":15533659,"name":"processing","f":14089502,"d_finished":1274019,"c":843,"l":15533576,"d":1275583},{"name":"ProduceResults","f":14088677,"d_finished":1122421,"c":1266,"l":15534084,"d":1122421},{"a":15534098,"name":"Finish","f":15534098,"d_finished":0,"c":0,"l":15535223,"d":1125},{"name":"task_result","f":14089522,"d_finished":648068,"c":422,"l":15531547,"d":648068}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:32.087ms;WaitDuration:0ms;RawBytesRead:2488696], [StepName: PREPARE_RESULT; ExecutionDuration:13.146ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.132ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.433ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:69.306ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:115.134ms;WaitDuration:0ms;RawBytesRead:2488696]) ; 2026-02-19T08:13:46.038637Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:13:44.579780Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-02-19T08:13:46.038700Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:13:46.038915Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous >> TScaleRecommenderTest::RollingRestart [GOOD] >> TScaleRecommenderTest::RollingRestartNoLastRecommendation >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropIndex >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin >> PhantomBlobs::TestTwoDeadAllAliveInactiveMirror3dcMemoryLimit10_KB |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-02-19T08:10:33.661575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:33.661675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:33.661769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:33.661815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:33.661858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:33.661925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:33.661962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-02-19T08:10:33.661996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:33.662088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:33.662173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:33.663094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:33.663424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:33.811140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:33.811238Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:33.812138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:33.834706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:33.834941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:33.835151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:33.854011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:33.854463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:33.855313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:33.856073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:33.859499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:33.859689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:33.860953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:33.861025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:33.861217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:33.861276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-02-19T08:10:33.861324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:33.861593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:33.869110Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2026-02-19T08:10:34.025568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:34.025913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.026197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:34.026261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:34.026540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:34.026629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:34.028856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:34.029106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:34.029326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.029403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:34.029463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:34.029554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:34.031444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.031509Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:34.031636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:34.033603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.033653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.033732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.033859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:34.037921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:34.040236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:34.040462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:34.041671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:34.041840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:34.041916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.042254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:34.042341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.042549Z node 1 :FLAT_TX_SCH ... 
as 4 2026-02-19T08:13:45.127327Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2026-02-19T08:13:45.127360Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2026-02-19T08:13:45.127397Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2026-02-19T08:13:45.127425Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2026-02-19T08:13:45.127454Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2026-02-19T08:13:45.130659Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:13:45.130780Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:13:45.130825Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:13:45.130875Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2026-02-19T08:13:45.130918Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2026-02-19T08:13:45.131915Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:13:45.132016Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:13:45.132057Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:13:45.132094Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2026-02-19T08:13:45.132134Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-02-19T08:13:45.136114Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 
72057594046678944, cookie: 190 2026-02-19T08:13:45.136349Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:13:45.136395Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:13:45.136448Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-02-19T08:13:45.136492Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-02-19T08:13:45.137684Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:13:45.137815Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:13:45.137855Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:13:45.137986Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2026-02-19T08:13:45.138049Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 3 2026-02-19T08:13:45.138159Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2026-02-19T08:13:45.141913Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:13:45.142122Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:13:45.144536Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:13:45.144731Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2026-02-19T08:13:45.146378Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2026-02-19T08:13:45.146429Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2026-02-19T08:13:45.148943Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2026-02-19T08:13:45.149102Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-02-19T08:13:45.149151Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:5311:6861] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-02-19T08:13:45.150938Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-02-19T08:13:45.150982Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-02-19T08:13:45.151057Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-02-19T08:13:45.151080Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-02-19T08:13:45.151128Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-02-19T08:13:45.151150Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-02-19T08:13:45.151198Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-02-19T08:13:45.151218Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-02-19T08:13:45.151265Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-02-19T08:13:45.151282Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-02-19T08:13:45.153236Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-02-19T08:13:45.153432Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-02-19T08:13:45.153470Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:5314:6864] 2026-02-19T08:13:45.154104Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-02-19T08:13:45.154218Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-02-19T08:13:45.154313Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-02-19T08:13:45.154350Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:5314:6864] 2026-02-19T08:13:45.154431Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 
72057594046678944 2026-02-19T08:13:45.154546Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-02-19T08:13:45.154577Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:5314:6864] 2026-02-19T08:13:45.154719Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-02-19T08:13:45.154748Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:5314:6864] 2026-02-19T08:13:45.154822Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-02-19T08:13:45.154978Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-02-19T08:13:45.155010Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:5314:6864] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> TScaleRecommenderTest::RollingRestartNoLastRecommendation [GOOD] >> TStorageBalanceTest::TestScenario1 >> KqpScheme::CreateAndAlterTableWithPartitionBy [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> Channels20::IcChannelLateBinding [GOOD] >> Channels20::IcChannelAsyncRead >> KqpOlapScheme::DropTable [GOOD] >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert >> THiveTest::TestLimitedNodeList [GOOD] >> KqpScheme::InvalidationAfterDropCreate [GOOD] >> KqpScheme::InvalidationAfterDropCreateCompatSchema >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> THiveTest::TestLockTabletExecution >> Channels20::LocalChannelAsyncRead [GOOD] >> Channels20::IcChannelTrivial >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:34.093893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:34.094016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:34.094096Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:34.094149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:34.094184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:34.094218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:34.094249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:34.094291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:34.094384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:34.094461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:34.095307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:34.095642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:34.221366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:34.221451Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:34.222285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:34.233002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:34.233109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:34.233286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:34.244837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:34.245948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:34.246843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:34.247220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:34.252211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2026-02-19T08:10:34.252389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:34.254366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:34.254440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:34.254668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:34.254724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:34.254796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:34.255129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.262752Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:34.410879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:34.411213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.411456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:34.411512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:34.411766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:34.411864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:34.415137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:34.415416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:34.415619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.415703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:34.415745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:34.415803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:34.418623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.418694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:34.418787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:34.420603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.420659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:34.420763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.420817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:34.424612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:34.426530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:34.426706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:34.427631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:34.427766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:34.427824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.428075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:34.428127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:34.428266Z node 1 :FLAT_TX_SCH ... as 2 2026-02-19T08:13:47.910840Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-02-19T08:13:47.910876Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-02-19T08:13:47.910908Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-02-19T08:13:47.910934Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-02-19T08:13:47.910959Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-02-19T08:13:47.913255Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:47.913359Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:47.913398Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:13:47.913441Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-02-19T08:13:47.913484Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-02-19T08:13:47.914309Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:47.914409Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:47.914445Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:13:47.914484Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-02-19T08:13:47.914526Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-02-19T08:13:47.916146Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:47.916245Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:47.916281Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:13:47.916317Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-02-19T08:13:47.916354Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-02-19T08:13:47.917504Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:47.917590Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:13:47.917625Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:13:47.917660Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-02-19T08:13:47.917694Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2026-02-19T08:13:47.917784Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-02-19T08:13:47.920678Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:13:47.920795Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:13:47.922742Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:13:47.922860Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-02-19T08:13:47.924278Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-02-19T08:13:47.924322Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-02-19T08:13:47.925939Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-02-19T08:13:47.926055Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-02-19T08:13:47.926097Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2679:4666] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-02-19T08:13:47.927391Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-02-19T08:13:47.927433Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-02-19T08:13:47.927517Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-02-19T08:13:47.927545Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-02-19T08:13:47.927606Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-02-19T08:13:47.927635Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-02-19T08:13:47.927697Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-02-19T08:13:47.927723Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-02-19T08:13:47.927804Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-02-19T08:13:47.927833Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-02-19T08:13:47.929766Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-02-19T08:13:47.929923Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-02-19T08:13:47.929970Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2682:4669] 2026-02-19T08:13:47.930406Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-02-19T08:13:47.930567Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at 
schemeshard: 72057594046678944 2026-02-19T08:13:47.930620Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-02-19T08:13:47.930652Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2682:4669] 2026-02-19T08:13:47.930847Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-02-19T08:13:47.930903Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-02-19T08:13:47.930932Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2682:4669] 2026-02-19T08:13:47.931146Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-02-19T08:13:47.931207Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-02-19T08:13:47.931238Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2682:4669] 2026-02-19T08:13:47.931397Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-02-19T08:13:47.931437Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2682:4669] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> TSchemeshardForcedCompactionTest::SchemeshardShouldCompactAfterRestart >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous [GOOD] >> KqpScheme::CreateTableWithWrongPartitionAtKeys [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin >> KqpScheme::CreateTableWithUniqConstraintPublicApi >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> KqpScheme::QueryWithAlter [GOOD] >> KqpScheme::RenameTable+ColumnTable >> TSchemeshardForcedCompactionTest::SchemeshardShouldNotCompactSameTableSimultaneously >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] 
>> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |90.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> KqpOlapScheme::DropColumnOldSchemeBulkUpsert [GOOD] >> KqpOlapScheme::DropThenAddColumn >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2026-02-19T08:13:31.014589Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:31.046655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:31.046875Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:31.054746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:31.054983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:31.055209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:31.055332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:31.055440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2026-02-19T08:13:31.055572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:31.055685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:31.055779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:31.055898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:31.056014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:31.056156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:31.056292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:31.056407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:31.089946Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:31.090152Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:31.090223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:31.090431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:31.090592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:31.090682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:31.090727Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:31.090822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:31.090886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:31.090930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:31.090959Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:31.091179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:31.091252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:31.091293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:31.091322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:31.091442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:31.091505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:31.091564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:31.091597Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:31.091661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:31.091728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:31.091777Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:31.091822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:31.091875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:31.091917Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:31.092150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:31.092210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:31.092260Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:31.092406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:31.092451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:31.092481Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:31.092545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:31.092612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:31.092667Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:31.092712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:31.092757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:31.092790Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:31.092980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:31.093042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
0.051ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.08ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.099ms;WaitDuration:0ms;RawBytesRead:224]) ; 2026-02-19T08:13:57.633374Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.434ms;WaitDuration:0ms;RawBytesRead:224], [StepName: PREPARE_RESULT; ExecutionDuration:0.24ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.252ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.031ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.051ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.08ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.099ms;WaitDuration:0ms;RawBytesRead:224]) ; 2026-02-19T08:13:57.633922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:13:57.634194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.434ms;WaitDuration:0ms;RawBytesRead:224], [StepName: PREPARE_RESULT; ExecutionDuration:0.24ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.252ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.031ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.051ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.08ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.099ms;WaitDuration:0ms;RawBytesRead:224]) ; 2026-02-19T08:13:57.634419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.434ms;WaitDuration:0ms;RawBytesRead:224], [StepName: PREPARE_RESULT; ExecutionDuration:0.24ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.252ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.031ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.051ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.08ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.099ms;WaitDuration:0ms;RawBytesRead:224]) ; 2026-02-19T08:13:57.634752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:3720:5725] finished for tablet 9437184 2026-02-19T08:13:57.635333Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:3713:5718];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.03},{"events":["f_ack"],"t":0.031},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.034}],"full":{"a":27224070,"name":"_full_task","f":27224070,"d_finished":0,"c":0,"l":27258679,"d":34609},"events":[{"name":"bootstrap","f":27224893,"d_finished":2018,"c":1,"l":27226911,"d":2018},{"a":27257767,"name":"ack","f":27255593,"d_finished":1680,"c":1,"l":27257273,"d":2592},{"a":27257749,"name":"processing","f":27227723,"d_finished":6372,"c":5,"l":27257275,"d":7302},{"name":"ProduceResults","f":27226423,"d_finished":3567,"c":8,"l":27258313,"d":3567},{"a":27258319,"name":"Finish","f":27258319,"d_finished":0,"c":0,"l":27258679,"d":360},{"name":"task_result","f":27227746,"d_finished":4576,"c":4,"l":27254449,"d":4576}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.434ms;WaitDuration:0ms;RawBytesRead:224], [StepName: PREPARE_RESULT; ExecutionDuration:0.24ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; 
ExecutionDuration:0.252ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.031ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.051ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.08ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.099ms;WaitDuration:0ms;RawBytesRead:224]) ; 2026-02-19T08:13:57.635409Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:3713:5718];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:13:57.635948Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:3713:5718];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.03},{"events":["f_ack"],"t":0.031},{"events":["l_ProduceResults","f_Finish"],"t":0.034},{"events":["l_ack","l_processing","l_Finish"],"t":0.035}],"full":{"a":27224070,"name":"_full_task","f":27224070,"d_finished":0,"c":0,"l":27259318,"d":35248},"events":[{"name":"bootstrap","f":27224893,"d_finished":2018,"c":1,"l":27226911,"d":2018},{"a":27257767,"name":"ack","f":27255593,"d_finished":1680,"c":1,"l":27257273,"d":3231},{"a":27257749,"name":"processing","f":27227723,"d_finished":6372,"c":5,"l":27257275,"d":7941},{"name":"ProduceResults","f":27226423,"d_finished":3567,"c":8,"l":27258313,"d":3567},{"a":27258319,"name":"Finish","f":27258319,"d_finished":0,"c":0,"l":27259318,"d":999},{"name":"task_result","f":27227746,"d_finished":4576,"c":4,"l":27254449,"d":4576}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.434ms;WaitDuration:0ms;RawBytesRead:224], [StepName: PREPARE_RESULT; ExecutionDuration:0.24ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.252ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.031ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.051ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.08ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.099ms;WaitDuration:0ms;RawBytesRead:224]) ; 2026-02-19T08:13:57.636055Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:13:57.598596Z;index_granules=0;index_portions=3;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=8232;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8232;selected_rows=0; 2026-02-19T08:13:57.636118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:13:57.636256Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3720:5725];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=f0599d94-d6a11f1-bfdeed55-f75ac774; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TSchemeshardForcedCompactionTest::SchemeshardShouldNotCompactSameTableSimultaneously [GOOD] >> TSchemeshardForcedCompactionTest::ShouldNotCompactServerless >> KqpScheme::InvalidationAfterDropCreateCompatSchema [GOOD] >> KqpScheme::InvalidationAfterDropCreateTable2 >> KqpScheme::CreateTableWithUniqConstraintPublicApi [GOOD] >> KqpScheme::CreateTableWithVectorIndex >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> Channels20::IcChannelAsyncRead [GOOD] >> Channels20::IcChannelEarlyFinish >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> Cdc::ShouldBreakLocksOnConcurrentDropIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> Channels20::IcChannelTrivial [GOOD] >> ConvertUnboxedValueToArrowAndBack::DictUtf8ToInterval >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanDebezium >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> 
DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink |90.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeshardForcedCompactionTest::ShouldNotCompactServerless [GOOD] >> KqpOlapScheme::DropThenAddColumn [GOOD] >> KqpOlapScheme::DropThenAddColumnCompaction >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardForcedCompactionTest::ShouldNotCompactServerless [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:20.429411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:20.429503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:20.429605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:20.429643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:20.429684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:20.429855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:20.429922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:20.429980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:20.430058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:20.430169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:20.431113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:20.431471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:20.571423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:20.571486Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:20.598973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:20.606124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:20.606392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:20.651927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:20.654462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:20.655286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:20.655610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:20.660194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:20.660429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:20.661980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:20.662058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:20.662206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:20.662271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:20.662327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:20.662577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.829810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.831257Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.831432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.831531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.831614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.831722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.831804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.831921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.832035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.832135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.832202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.832267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.832377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.832500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
04.798823Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2026-02-19T08:14:04.798889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2026-02-19T08:14:04.798914Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2026-02-19T08:14:04.798951Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [2:870:2822] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976720681 at schemeshard: 72075186233409546 2026-02-19T08:14:04.799026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [2:870:2822] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/Shared/.sys/resource_pools' 2026-02-19T08:14:04.799071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2026-02-19T08:14:04.799097Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2026-02-19T08:14:04.799142Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [2:870:2822] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976720683 at schemeshard: 72075186233409546 2026-02-19T08:14:04.799209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [2:870:2822] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/Shared/.sys/auth_permissions' 2026-02-19T08:14:04.799251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-02-19T08:14:04.799270Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2026-02-19T08:14:04.799316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-02-19T08:14:04.799335Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2026-02-19T08:14:04.799382Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [2:870:2822] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976720684 at schemeshard: 72075186233409546 2026-02-19T08:14:04.799455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [2:870:2822] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/Shared/.sys/top_partitions_one_minute' 2026-02-19T08:14:04.799489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [2:870:2822] at schemeshard: 72075186233409546 Send TEvRosterUpdateFinished TestWaitNotification: OK eventTxId 105 2026-02-19T08:14:04.802221Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 
271122945, Sender [2:1604:3461], Recipient [2:1025:2963]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/User/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:14:04.802295Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:14:04.802447Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72075186233409549 2026-02-19T08:14:04.802854Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/User/Simple" took 364us result status StatusSuccess 2026-02-19T08:14:04.803808Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Simple" PathDescription { Self { Name: "Simple" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key1" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 2 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2147483647 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\377\377\377\177" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-02-19T08:14:04.804651Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [2:1607:3464], Recipient [2:1269:3151]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:04.804702Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:04.804749Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186233409552, clientId# [2:1606:3463], serverId# [2:1607:3464], sessionId# [0:0:0] 2026-02-19T08:14:04.804883Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553169, Sender [2:1605:3462], Recipient [2:1269:3151]: NKikimrTxDataShard.TEvGetInfoRequest 2026-02-19T08:14:04.806100Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 280231936, Sender [2:1608:3465], Recipient [2:1025:2963]: NKikimrForcedCompaction.TEvCreateRequest TxId: 106 DatabaseName: "/MyRoot/User" Settings { source_path: "Simple" } 2026-02-19T08:14:04.806154Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5461: StateWork, processing event TEvForcedCompaction::TEvCreateRequest 2026-02-19T08:14:04.806276Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_forced_compaction__create.cpp:19: [72075186233409549][ForcedCompaction] TForcedCompaction::TTxCreate DoExecute TxId: 106 DatabaseName: "/MyRoot/User" Settings { source_path: "Simple" } 2026-02-19T08:14:04.806375Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_forced_compaction__create.cpp:123: [72075186233409549][ForcedCompaction] TForcedCompaction::TTxCreate DoComplete 2026-02-19T08:14:04.806410Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409549 2026-02-19T08:14:04.806545Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [2:1608:3465] msg type: 280231937 msg: NKikimrForcedCompaction.TEvCreateResponse TxId: 106 Status: PRECONDITION_FAILED Issues { message: "Forced compaction not allowed for serverless" severity: 1 } at schemeshard: 72075186233409549 >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous >> ConvertUnboxedValueToArrowAndBack::DictUtf8ToInterval [GOOD] >> ConvertUnboxedValueToArrowAndBack::DoubleOptionalVariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::DoubleOptionalVariantOverTupleWithOptionals [GOOD] >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveBalancerWithFollowers >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> KqpScheme::CreateAndAlterTableWithPartitioningBySizeCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 >> KqpScheme::CreateTableWithVectorIndex [GOOD] >> KqpScheme::CreateTableWithVectorIndexCovered >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> KqpScheme::InvalidationAfterDropCreateTable2 [GOOD] >> KqpScheme::FamilyColumnTest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple [GOOD] Test command err: 2026-02-19T08:13:40.808330Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486549741240982:2065];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:40.808382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:40.971771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:13:41.775529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:41.775676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:41.988245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:41.988823Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:42.011808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:42.032392Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:42.061929Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486549741240957:2081] 1771488820805555 != 1771488820805558 2026-02-19T08:13:42.298600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:42.396310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:42.396343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:42.396352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:42.396451Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:42.900097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:42.957828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:43.257126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:43.539791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:43.634656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:45.810285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486549741240982:2065];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:45.810368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:45.902106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486571216079329:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:45.902232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:45.902658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486571216079339:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:45.902720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.317617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.355300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.392629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.430108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.463839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.508374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.550830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.604174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.698836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486575511047510:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.698924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486575511047515:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.698929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.699085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486575511047517:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.699147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.703569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:46.716595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486575511047519:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:13:46.790354Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486575511047570:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:49.385470Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486589530669151: ... State: Unknown -> Disconnected 2026-02-19T08:13:56.262311Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:56.279948Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:56.286032Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:13:56.289835Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:56.291328Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:56.406297Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:56.406329Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:56.406338Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:56.406418Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:56.592969Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:56.928785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:56.957305Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:57.052583Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:57.065437Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:57.132406Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:57.189881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:57.284737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:59.997388Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486630930289906:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:59.997482Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:59.997775Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486630930289916:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:59.997831Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:00.070261Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:00.121168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:00.172976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:00.237354Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:00.340689Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:00.390578Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:00.491962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:00.600873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:00.743530Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486635225258379:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:00.743623Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:00.743637Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486635225258384:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:00.743820Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608486635225258386:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:00.743867Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:00.747866Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:00.767133Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608486635225258387:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:00.825675Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608486635225258467:4910] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:01.038783Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608486618045385641:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:01.038857Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:01.083437Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608486618034471166:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:01.083513Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:02.543327Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1408: UPDATE IGNORED EarlyFinished=0, PopBytes=6, [3:7608486643815193382:2482] from peer [4:7608486643804276327:2373] 2026-02-19T08:14:02.543375Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1408: UPDATE IGNORED EarlyFinished=0, PopBytes=7, [3:7608486643815193382:2482] from peer [4:7608486643804276327:2373] |90.6%| [TA] $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |90.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> KqpScheme::RenameTable+ColumnTable [GOOD] >> KqpScheme::RenameTable-ColumnTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:39.751205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:39.751308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:39.751398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:39.751450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:39.751530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:39.751572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:39.751622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:39.751661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:39.751734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:39.751807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-02-19T08:10:39.752669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:39.753036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:39.877104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:39.877184Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:39.878037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:39.891699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:39.891809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:39.891955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:39.907468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:39.908260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:39.909035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:39.909387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:39.913998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:39.914174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:39.915938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:39.916002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:39.916209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:39.916307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:39.916360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:39.916614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:39.927114Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:40.102256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:40.102491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:40.102739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:40.102796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:40.103026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:40.103126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:40.106785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:40.107071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:40.107295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:40.107373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:40.107410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:40.107489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:40.110667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:40.110740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:40.110817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:40.117482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:40.117557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2026-02-19T08:10:40.117642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:40.117728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:40.125293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:40.131865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:40.132115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:40.133157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:40.133317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:40.133369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:40.133656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:40.133705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:40.133887Z node 1 :FLAT_TX_SCH ... 
72057594046678944, txId: 190, path id: 138 2026-02-19T08:14:04.636543Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [32:213:2212], at schemeshard: 72057594046678944, txId: 190, path id: 139 2026-02-19T08:14:04.636576Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [32:213:2212], at schemeshard: 72057594046678944, txId: 190, path id: 139 2026-02-19T08:14:04.636609Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [32:213:2212], at schemeshard: 72057594046678944, txId: 190, path id: 140 2026-02-19T08:14:04.636643Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [32:213:2212], at schemeshard: 72057594046678944, txId: 190, path id: 140 2026-02-19T08:14:04.638575Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:04.638700Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:04.638752Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:14:04.638800Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2026-02-19T08:14:04.638847Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2026-02-19T08:14:04.640468Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:04.640573Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:04.640615Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:14:04.640658Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2026-02-19T08:14:04.640699Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-02-19T08:14:04.641980Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 
72057594046678944, cookie: 190 2026-02-19T08:14:04.642097Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:04.642135Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:14:04.642172Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-02-19T08:14:04.642210Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-02-19T08:14:04.642738Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:04.642822Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:04.642858Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:14:04.642892Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2026-02-19T08:14:04.642930Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 2 2026-02-19T08:14:04.643003Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2026-02-19T08:14:04.647103Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:14:04.647241Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:14:04.647344Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:14:04.649244Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2026-02-19T08:14:04.650924Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2026-02-19T08:14:04.650982Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2026-02-19T08:14:04.652830Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2026-02-19T08:14:04.652949Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-02-19T08:14:04.652994Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:2940:4927] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-02-19T08:14:04.654365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-02-19T08:14:04.654416Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-02-19T08:14:04.654533Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-02-19T08:14:04.654564Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-02-19T08:14:04.654650Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-02-19T08:14:04.654682Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-02-19T08:14:04.654744Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-02-19T08:14:04.654772Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-02-19T08:14:04.654848Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-02-19T08:14:04.654875Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-02-19T08:14:04.657031Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-02-19T08:14:04.657273Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-02-19T08:14:04.657344Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-02-19T08:14:04.657389Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:2943:4930] 2026-02-19T08:14:04.657619Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-02-19T08:14:04.657753Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2026-02-19T08:14:04.657810Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-02-19T08:14:04.657842Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter 
[32:2943:4930] 2026-02-19T08:14:04.658005Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-02-19T08:14:04.658063Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-02-19T08:14:04.658115Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:2943:4930] 2026-02-19T08:14:04.658232Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-02-19T08:14:04.658263Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:2943:4930] 2026-02-19T08:14:04.658434Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-02-19T08:14:04.658466Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:2943:4930] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> Channels20::IcChannelEarlyFinish [GOOD] >> Channels20::IcChannelBackPressure >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> KqpPrefixedVectorIndexes::EmptyPrefixedVectorIndexInsert-Nullable-Covered [GOOD] >> KqpPrefixedVectorIndexes::EmptyPrefixedVectorIndexInsertWithOverlap+Covered >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo >> KqpPrefixedVectorIndexes::PrefixedVectorEmptyIndexedTableInsertWithOverlap-Covered [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteFilter+Covered >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeUncompat [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser >> Cdc::ShouldBreakLocksOnConcurrentDropStream >> KqpScheme::CreateTableWithVectorIndexCovered [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> KqpScheme::FamilyColumnTest [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> KqpPrefixedVectorIndexes::CosineDistanceWithPkSuffixWithOverlap+Covered [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink 
[GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> KqpScheme::CreateTableWithVectorIndexCaseIncentive >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> KqpPrefixedVectorIndexes::CosineDistanceWithPkSuffixWithOverlap-Covered >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser >> KqpScheme::Int8Int16 >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink >> KqpScheme::RenameTable-ColumnTable [GOOD] >> KqpScheme::PathWithNoRoot >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> Cdc::InitialScanUpdatedRows >> KqpPrefixedVectorIndexes::PrefixedVectorIndexTruncateTable+Covered+Overlap [GOOD] >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot >> KqpScheme::CreateAndAlterTableWithPartitionSizeCompat [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> KqpVectorIndexes::CoveredVectorIndexWithFollowers-StaleRO [GOOD] >> THiveTest::TestDownAfterDrain [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser [GOOD] >> Channels20::IcChannelBackPressure [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> KqpOlapScheme::DropThenAddColumnCompaction [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> KqpScheme::Int8Int16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteFilter+Covered [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat >> KqpOlapScheme::DropTtlColumn >> THiveTest::TestHiveBalancerIgnoreTablet >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin >> 
Channels20::CaIntegrationTrivial >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> KqpScheme::Int8Int16Olap >> THiveTest::TestDrainAndReconnect >> KqpScheme::CreateTableWithVectorIndexCaseIncentive [GOOD] >> KqpScheme::CreateTableWithVectorIndexPublicApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:12:36.349532Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:36.455553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:12:36.472721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:36.472793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:12:36.473192Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:12:36.762430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:36.762777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:36.846982Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488752904787 != 1771488752904791 2026-02-19T08:12:36.862761Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:36.913633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:37.073944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:12:37.190123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:37.246703Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:304} SessionId: ydb://session/3?node_id=1&id=MWE3NGZkNzItMjg2OGUyMzAtZWY1MTIyZDEtOWU1ZjJiYzA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MWE3NGZkNzItMjg2OGUyMzAtZWY1MTIyZDEtOWU1ZjJiYzA= temp_dir_name# 0d654a04-413a-426c-a748-71afa3410605 trace_id# 2026-02-19T08:12:37.247228Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:309} SessionId: ydb://session/3?node_id=1&id=MWE3NGZkNzItMjg2OGUyMzAtZWY1MTIyZDEtOWU1ZjJiYzA=, ActorId: [1:849:2743], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-02-19T08:12:37.247802Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:542} SessionId: ydb://session/3?node_id=1&id=MWE3NGZkNzItMjg2OGUyMzAtZWY1MTIyZDEtOWU1ZjJiYzA=, ActorId: [1:849:2743], ActorState: ReadyState, LegacyTraceId: 01khtfahfz5c9y1rkaztrm9r1k, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpc_actor# [0:0:0] database# database_id# /Root pool_id# default trace_id# 2026-02-19T08:12:37.456096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:852:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.456266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.456694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:870:2751], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.456774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:37.468651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.502210Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:891:2771], Recipient [1:899:2777]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:12:37.503158Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:891:2771], Recipient [1:899:2777]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:12:37.503419Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:899:2777] 2026-02-19T08:12:37.503643Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:12:37.512772Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:891:2771], Recipient [1:899:2777]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:12:37.549215Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:12:37.549354Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:12:37.551042Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:12:37.551140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:12:37.551193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:12:37.551548Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:12:37.551710Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:12:37.551799Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:915:2777] in generation 1 2026-02-19T08:12:37.552212Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:12:37.574917Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:12:37.575150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:12:37.575275Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:917:2787] 2026-02-19T08:12:37.575311Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:12:37.575346Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:12:37.575381Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:12:37.575582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:899:2777], Recipient [1:899:2777]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:37.575630Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:37.575980Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:12:37.576079Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:12:37.576155Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:12:37.576199Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:37.576266Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:12:37.576304Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:12:37.576373Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:12:37.576405Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:12:37.576445Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:12:37.576561Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:906:2781], Recipient [1:899:2777]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:37.576600Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:37.576655Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:906:2781], sessionId# [0:0:0] 2026-02-19T08:12:37.577040Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:906:2781] 2026-02-19T08:12:37.577079Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:12:37.577167Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:12:37.577402Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:12:37.577468Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:12:37.577555Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:12:37.577612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:12:37.577647Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:12:37.577678Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSch ... 
4:11.771223Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3243: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:14:11.771286Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:14:11.771428Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976710661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-02-19T08:14:11.771507Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710666] at 72075186224037888 on unit CheckDataTx 2026-02-19T08:14:11.771548Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710666] at 72075186224037888 is Executed 2026-02-19T08:14:11.771576Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710666] at 72075186224037888 executing on unit CheckDataTx 2026-02-19T08:14:11.771599Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:14:11.771633Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710666] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:14:11.771672Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2026-02-19T08:14:11.771729Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976710666] at 72075186224037888 2026-02-19T08:14:11.771766Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710666] at 72075186224037888 is Executed 2026-02-19T08:14:11.771797Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:14:11.771827Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710666] at 72075186224037888 to execution unit BlockFailPoint 2026-02-19T08:14:11.771857Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710666] at 72075186224037888 on unit BlockFailPoint 2026-02-19T08:14:11.771888Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710666] at 72075186224037888 is Executed 2026-02-19T08:14:11.771911Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710666] at 72075186224037888 executing on unit BlockFailPoint 2026-02-19T08:14:11.771931Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-02-19T08:14:11.771951Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710666] at 72075186224037888 on unit ExecuteKqpDataTx 2026-02-19T08:14:11.772022Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:274: Operation [0:281474976710666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-02-19T08:14:11.772152Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976710661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 
72057594046644480 PathId: 38 HasWrites: false 2026-02-19T08:14:11.772255Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:520: add locks to result: 0 2026-02-19T08:14:11.772340Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710666] at 72075186224037888 is Executed 2026-02-19T08:14:11.772371Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-02-19T08:14:11.772397Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710666] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:14:11.772424Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710666] at 72075186224037888 on unit FinishPropose 2026-02-19T08:14:11.772483Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976710666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-02-19T08:14:11.772575Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710666] at 72075186224037888 is DelayComplete 2026-02-19T08:14:11.772601Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710666] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:14:11.772623Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710666] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:14:11.772648Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710666] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:14:11.772692Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710666] at 72075186224037888 is Executed 2026-02-19T08:14:11.772725Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710666] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:14:11.772753Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976710666] at 72075186224037888 has finished 2026-02-19T08:14:11.772811Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:14:11.772839Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976710666] at 72075186224037888 on unit FinishPropose 2026-02-19T08:14:11.772878Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:11.774249Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [13:71:2118], Recipient [13:1110:2930]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710661 LockNode: 13 Status: STATUS_NOT_FOUND 2026-02-19T08:14:12.024301Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [13:1175:2974], Recipient [13:1110:2930]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-02-19T08:14:12.024536Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at 
tablet# 72075186224037888, FollowerId 0 2026-02-19T08:14:12.024610Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2026-02-19T08:14:12.024675Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v600/18446744073709551615 2026-02-19T08:14:12.024766Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-02-19T08:14:12.024889Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:14:12.024973Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:14:12.025032Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:14:12.025087Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:14:12.025139Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-02-19T08:14:12.025194Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:14:12.025218Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:14:12.025239Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:14:12.025260Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:14:12.025381Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-02-19T08:14:12.025664Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[13:1175:2974], 0} after executionsCount# 1 2026-02-19T08:14:12.025777Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[13:1175:2974], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:14:12.025881Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[13:1175:2974], 0} finished in read 2026-02-19T08:14:12.025966Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:14:12.025991Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:14:12.026016Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:14:12.026043Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:14:12.026089Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:14:12.026124Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:14:12.026156Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-02-19T08:14:12.026212Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:14:12.026366Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:14:12.027727Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [13:1175:2974], Recipient [13:1110:2930]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:14:12.027804Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:21.511542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:21.511627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:21.511710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:21.511755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:21.511789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:21.511825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:21.511873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:13:21.511904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:21.511966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:21.512067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:21.512913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:21.513212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:21.604415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:21.604473Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:21.617119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:21.617762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:21.618012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:21.630376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:21.631627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:21.632552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:21.632852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:21.636404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:21.636605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:21.637977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:21.638042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:21.638157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:21.638206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:21.638259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:21.638443Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.795425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.796585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.796719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.796797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.796930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.797661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
r [0:0:0], Recipient [4:1273:3154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-02-19T08:14:21.518251Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3242: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-02-19T08:14:21.518382Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409552 outdated step 300 last cleanup 0 2026-02-19T08:14:21.518473Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:14:21.518522Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409552 2026-02-19T08:14:21.518559Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409552 has no attached operations 2026-02-19T08:14:21.518595Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409552 2026-02-19T08:14:21.518772Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [4:1273:3154]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-02-19T08:14:21.518933Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3537: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 32 2026-02-19T08:14:21.519297Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [4:1273:3154], Recipient [4:1610:3464]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 310 Memory: 119576 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 4 StartTime: 221 TableOwnerId: 72075186233409549 FollowerId: 0 2026-02-19T08:14:21.519348Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:21.519423Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.031 2026-02-19T08:14:21.519561Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:21.519603Z node 4 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-02-19T08:14:21.530072Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [4:1278:3157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-02-19T08:14:21.530174Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3242: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-02-19T08:14:21.530288Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409553 outdated step 300 last cleanup 0 2026-02-19T08:14:21.530362Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:14:21.530400Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409553 2026-02-19T08:14:21.530443Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409553 has no attached operations 2026-02-19T08:14:21.530476Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409553 2026-02-19T08:14:21.530621Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [4:1278:3157]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-02-19T08:14:21.530742Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3537: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 32 2026-02-19T08:14:21.531130Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [4:1278:3157], Recipient [4:1610:3464]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 286 Memory: 119576 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 4 StartTime: 221 TableOwnerId: 72075186233409549 FollowerId: 0 2026-02-19T08:14:21.531202Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:21.531260Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0286 2026-02-19T08:14:21.531467Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 
0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:21.542872Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [4:1610:3464]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:14:21.542929Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:14:21.542987Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [4:1610:3464], Recipient [4:1610:3464]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:14:21.543019Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:14:21.554015Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [4:1610:3464]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2026-02-19T08:14:21.554110Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5511: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2026-02-19T08:14:21.554432Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269746180, Sender [4:2096:3925], Recipient [4:1610:3464]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-02-19T08:14:21.554499Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5510: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-02-19T08:14:21.575773Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [4:2099:3928], Recipient [4:1273:3154]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:21.575857Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:21.575907Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186233409552, clientId# [4:2098:3927], serverId# [4:2099:3928], sessionId# [0:0:0] 2026-02-19T08:14:21.576088Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553213, Sender [4:2097:3926], Recipient [4:1273:3154]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } 2026-02-19T08:14:21.576551Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [4:2102:3931], Recipient [4:1273:3154]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:21.576594Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:21.576632Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186233409552, clientId# [4:2101:3930], serverId# [4:2102:3931], sessionId# [0:0:0] 2026-02-19T08:14:21.576739Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553213, Sender [4:2100:3929], Recipient [4:1273:3154]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } 2026-02-19T08:14:21.577075Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [4:2105:3934], Recipient [4:1278:3157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:21.577096Z node 4 
:TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:21.577117Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186233409553, clientId# [4:2104:3933], serverId# [4:2105:3934], sessionId# [0:0:0] 2026-02-19T08:14:21.577170Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553213, Sender [4:2103:3932], Recipient [4:1278:3157]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } 2026-02-19T08:14:21.577598Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [4:2108:3937], Recipient [4:1278:3157]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:21.577629Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:21.577651Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186233409553, clientId# [4:2107:3936], serverId# [4:2108:3937], sessionId# [0:0:0] 2026-02-19T08:14:21.577837Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553213, Sender [4:2106:3935], Recipient [4:1278:3157]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::PrefixedVectorIndexTruncateTable+Covered+Overlap [GOOD] Test command err: 2026-02-19T08:11:56.142825Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486101706890865:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:56.142868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:56.197552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:56.539620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:56.539732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:56.544459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:56.585783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:56.680163Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486101706890815:2081] 1771488716135090 != 1771488716135093 2026-02-19T08:11:56.693334Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:56.768588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:56.768610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:11:56.768618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:56.768711Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:56.893448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:57.169855Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:57.333032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:57.347110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:11:57.400269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:57.551476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:57.745662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:57.841813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:59.844052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486114591794582:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:59.844195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:59.847531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486114591794592:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:59.847662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.236011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.287691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.341988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.442875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.500082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.537042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.574466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.625810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.698913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118886762756:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.698999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.699222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118886762761:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.699271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118886762762:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.699294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.702790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:00.714942Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486118886762765:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:00.815513Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486118886762816:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:01.144272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486101706890865:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:01.144333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... ats.cpp:37: BuildStatsForCollector: datashardId 72075186224037930, followerId 0 2026-02-19T08:14:15.991022Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:44 with partCount# 1, rowCount# 60, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:14:15.991067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037930: SplitByLoadNotEnabledForTable 2026-02-19T08:14:15.991132Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:14:15.991254Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [2:7608486153839225784:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:15.991275Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:15.991284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-02-19T08:14:16.748551Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:7608486153839225784:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:14:16.748597Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:14:16.748657Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [2:7608486153839225784:2149], Recipient [2:7608486153839225784:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:14:16.748677Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:14:17.697068Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486171019096845:2442], Recipient [2:7608486153839225784:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037915 TableLocalId: 46 Generation: 1 Round: 11 TableStats { DataSize: 252 RowCount: 4 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488735066 LastUpdateTime: 1771488735066 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 252 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 37 Memory: 133639 Storage: 350 GroupWriteThroughput { GroupID: 2181038080 Channel: 1 Throughput: 48 } GroupWriteThroughput { GroupID: 2181038080 Channel: 0 Throughput: 18 } } ShardState: 2 UserTablePartOwners: 72075186224037915 NodeId: 2 StartTime: 1771488732637 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:14:17.697120Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:17.697162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037915 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 46] state 'Ready' dataSize 252 rowCount 4 cpuUsage 0.0037 2026-02-19T08:14:17.697282Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037915 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 46] raw table stats: DataSize: 252 RowCount: 4 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488735066 LastUpdateTime: 1771488735066 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 252 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:17.697308Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.099996s, queue# 1 2026-02-19T08:14:17.733692Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486171019096846:2443], Recipient [2:7608486153839225784:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037916 TableLocalId: 46 Generation: 1 Round: 11 TableStats { DataSize: 310 RowCount: 5 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488735062 LastUpdateTime: 1771488735062 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 5 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 310 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 38 Memory: 133639 Storage: 408 GroupWriteThroughput { GroupID: 2181038080 Channel: 1 Throughput: 48 } GroupWriteThroughput { GroupID: 2181038080 Channel: 0 Throughput: 18 } } ShardState: 2 UserTablePartOwners: 72075186224037916 NodeId: 2 StartTime: 1771488732637 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:14:17.733764Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:17.733800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 
72057594046644480 from shard 72075186224037916 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 46] state 'Ready' dataSize 310 rowCount 5 cpuUsage 0.0038 2026-02-19T08:14:17.733899Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037916 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 46] raw table stats: DataSize: 310 RowCount: 5 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488735062 LastUpdateTime: 1771488735062 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 5 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 310 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:17.748935Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:7608486153839225784:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:14:17.748981Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:14:17.749028Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [2:7608486153839225784:2149], Recipient [2:7608486153839225784:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:14:17.749048Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:14:17.797618Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [2:7608486153839225784:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:17.797675Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:17.797691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2026-02-19T08:14:17.797753Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 2 2026-02-19T08:14:17.797771Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-02-19T08:14:17.797826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 46 shard idx 72057594046644480:28 data size 252 row count 4 2026-02-19T08:14:17.797886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037915 maps to shardIdx: 72057594046644480:28 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], pathId map=Join1, is column=0, is olap=0, RowCount 4, DataSize 252 2026-02-19T08:14:17.797904Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037915, followerId 0 2026-02-19T08:14:17.797980Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 
72057594046644480:28 with partCount# 1, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:14:17.798030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037915: SplitByLoadNotEnabledForTable 2026-02-19T08:14:17.798067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 46 shard idx 72057594046644480:29 data size 310 row count 5 2026-02-19T08:14:17.798094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037916 maps to shardIdx: 72057594046644480:29 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], pathId map=Join1, is column=0, is olap=0, RowCount 5, DataSize 310 2026-02-19T08:14:17.798103Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037916, followerId 0 2026-02-19T08:14:17.798150Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:29 with partCount# 1, rowCount# 5, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:14:17.798164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037916: SplitByLoadNotEnabledForTable 2026-02-19T08:14:17.798210Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:14:17.801458Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [2:7608486153839225784:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:17.801496Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:17.801510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 |90.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:31.015400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:31.015473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:31.015546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, 
ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:31.015584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:31.015615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:31.015667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:31.015700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:31.015729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:31.015816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:31.015896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:31.016677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:31.016977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:31.134438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:31.134519Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:31.135240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:31.150916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:31.151033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:31.151205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:31.161984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:31.162608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:31.163169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:31.163465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:31.168031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:31.168191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2026-02-19T08:10:31.170249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:31.170315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:31.170553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:10:31.170622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:31.170692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:31.171002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.178138Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:31.298699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:31.298930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.299109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:31.299157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:31.299330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:31.299377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:31.306854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:31.307187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:31.307412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.307484Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:31.307524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:31.307575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:10:31.309596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.309672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:31.309748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:31.311193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.311242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:31.311329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:31.311378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:31.314910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:31.316576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:31.316742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:31.317692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:31.317863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:31.317906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:31.318095Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:31.318127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:31.318267Z node 1 :FLAT_TX_SCH ... ablet: 72075186233409584 cookie: 72057594046678944:39 msg type: 275382275 2026-02-19T08:14:16.191733Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:16.191817Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:16.191853Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:14:16.191886Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-02-19T08:14:16.191924Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 3 2026-02-19T08:14:16.193364Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:16.193431Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2026-02-19T08:14:16.193455Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2026-02-19T08:14:16.193480Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 4 2026-02-19T08:14:16.193507Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 4 2026-02-19T08:14:16.193567Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 190, ready parts: 2/3, is published: true 2026-02-19T08:14:16.196883Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:14:16.197300Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:14:16.197435Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:14:16.198805Z node 31 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-02-19T08:14:16.211745Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6670: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409584 TxId: 190 2026-02-19T08:14:16.211797Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 190, tablet: 72075186233409584, partId: 2 2026-02-19T08:14:16.211887Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 190:2, at schemeshard: 72057594046678944, message: Origin: 72075186233409584 TxId: 190 FAKE_COORDINATOR: Erasing txId 190 2026-02-19T08:14:16.214008Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 190:2, at schemeshard: 72057594046678944 2026-02-19T08:14:16.214258Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 190:2, at schemeshard: 72057594046678944 2026-02-19T08:14:16.214317Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 190:2 ProgressState 2026-02-19T08:14:16.214427Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2026-02-19T08:14:16.214464Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-02-19T08:14:16.214506Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2026-02-19T08:14:16.214542Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-02-19T08:14:16.214577Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 190, ready parts: 3/3, is published: true 2026-02-19T08:14:16.214641Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [31:308:2296] message: TxId: 190 2026-02-19T08:14:16.214691Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-02-19T08:14:16.214735Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:0 2026-02-19T08:14:16.214771Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 190:0 2026-02-19T08:14:16.214844Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-02-19T08:14:16.214874Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:1 2026-02-19T08:14:16.214890Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 190:1 2026-02-19T08:14:16.214917Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-02-19T08:14:16.214936Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation 
and all the parts is done, operation id: 190:2 2026-02-19T08:14:16.214954Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 190:2 2026-02-19T08:14:16.215020Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 3 2026-02-19T08:14:16.216965Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-02-19T08:14:16.217002Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [31:5674:7178] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-02-19T08:14:16.218605Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-02-19T08:14:16.218655Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-02-19T08:14:16.218735Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-02-19T08:14:16.218763Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-02-19T08:14:16.218831Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-02-19T08:14:16.218866Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-02-19T08:14:16.218927Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-02-19T08:14:16.218946Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-02-19T08:14:16.218992Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-02-19T08:14:16.219011Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-02-19T08:14:16.221108Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-02-19T08:14:16.221306Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-02-19T08:14:16.221349Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [31:5717:7221] 2026-02-19T08:14:16.221734Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-02-19T08:14:16.221940Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-02-19T08:14:16.221968Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [31:5717:7221] 2026-02-19T08:14:16.222039Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 187, at schemeshard: 72057594046678944 2026-02-19T08:14:16.222250Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2026-02-19T08:14:16.222299Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-02-19T08:14:16.222323Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [31:5717:7221] 2026-02-19T08:14:16.222456Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-02-19T08:14:16.222513Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-02-19T08:14:16.222534Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [31:5717:7221] 2026-02-19T08:14:16.222685Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-02-19T08:14:16.222712Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [31:5717:7221] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 |90.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::CoveredVectorIndexWithFollowers-StaleRO [GOOD] Test command err: 2026-02-19T08:12:53.019789Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486347667174772:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:53.019859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:53.486844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.525689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.532801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:53.602004Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486343372207365:2081] 1771488772978628 != 1771488772978631 2026-02-19T08:12:53.631531Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:53.638411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.882961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:12:53.938017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:53.938038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:53.938049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:53.938158Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:54.093567Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:54.597408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:54.710618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.971593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:55.229750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:55.346143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.779931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486364847045723:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.780056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.780491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486364847045733:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.780545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.022008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486347667174772:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:58.022067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:58.195940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.256312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.320932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.372516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.406584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.464030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.533083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.604435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:58.707463Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486369142013905:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.707544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.707920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486369142013910:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.707956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486369142013911:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.708053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:58.711871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:58.728446Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486369142013914:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:58.799695Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486369142013967:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:00.646218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:01.081765Z no ... rd: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:42.048645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:45.152375Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486548450123672:2221];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:45.152438Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:45.631619Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486569924961846:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:45.631728Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:45.632003Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486569924961856:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:45.632042Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:45.716678Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:45.760879Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:45.808381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:45.855965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:45.901296Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:45.966507Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.020760Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.106328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:46.242198Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486574219930025:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.242304Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.242841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486574219930030:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.242899Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.242918Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486574219930031:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:46.248778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:46.263538Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486574219930034:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:13:46.409145Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486574219930085:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:48.314567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:48.606590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:48.683094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:13:48.758815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:13:48.874348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:13:48.947521Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:13:55.321405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:13:55.321437Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... SELECT from partition_stats for /Root/TestTable/index/indexImplPostingTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId = 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable/index/indexImplPostingTable' ... SELECT from partition_stats for /Root/TestTable/index/indexImplPostingTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable/index/indexImplPostingTable' ... 
SELECT from partition_stats for /Root/TestTable/index/indexImplLevelTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId = 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable/index/indexImplLevelTable' ... SELECT from partition_stats for /Root/TestTable/index/indexImplLevelTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable/index/indexImplLevelTable' ... SELECT from partition_stats for /Root/TestTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId = 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' ... SELECT from partition_stats for /Root/TestTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' >> THiveTest::TestDrainAndReconnect [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> Cdc::ShouldBreakLocksOnConcurrentDropStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2026-02-19T08:13:30.537426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:30.572754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:30.573044Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:30.581158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:30.581394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:30.581663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:30.581848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:30.581993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:30.582131Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:30.582242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:30.582348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:30.582444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:30.582631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.582756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:30.582864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:30.582988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:30.623666Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:30.623842Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:30.623899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:30.624107Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.624285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:30.624438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:30.624482Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:30.624674Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2026-02-19T08:13:30.624744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:30.624795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:30.624844Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:30.625058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.625142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:30.625186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:30.625220Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:30.625322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:30.625377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:30.625425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:30.625454Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:30.625521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:30.625574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:30.625695Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:30.625769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:30.625829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:30.625872Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:30.626099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:30.626152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:30.626186Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:30.626344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:30.626389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.626420Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.626471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:30.626509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:30.626540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:30.626586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:30.626628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:30.626670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:30.626872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:30.626923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
rd;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=76072; 2026-02-19T08:14:20.787156Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-02-19T08:14:20.788044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=828; 2026-02-19T08:14:20.788098Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=77277; 2026-02-19T08:14:20.788146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=77406; 2026-02-19T08:14:20.788207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-02-19T08:14:20.788457Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=205; 2026-02-19T08:14:20.788496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=78191; 2026-02-19T08:14:20.788645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=97; 2026-02-19T08:14:20.788757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=64; 2026-02-19T08:14:20.789160Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=361; 2026-02-19T08:14:20.789531Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=318; 2026-02-19T08:14:20.812577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=22941; 2026-02-19T08:14:20.838342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=25637; 2026-02-19T08:14:20.838467Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2026-02-19T08:14:20.838524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-02-19T08:14:20.838569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2026-02-19T08:14:20.838651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2026-02-19T08:14:20.838698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-02-19T08:14:20.838788Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2026-02-19T08:14:20.838831Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-02-19T08:14:20.838912Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-02-19T08:14:20.839006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2026-02-19T08:14:20.839089Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2026-02-19T08:14:20.839128Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=138618; 2026-02-19T08:14:20.839273Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=6064952;raw_bytes=5869450;count=1;records=75000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=6007352;raw_bytes=7088450;count=2;records=75200} inactive {blob_bytes=84829232;raw_bytes=89388100;count=214;records=1050200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:14:20.839385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:14:20.839445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:14:20.839513Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:14:20.839558Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=0; 2026-02-19T08:14:20.839755Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:14:20.839820Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:816;message=tiling compaction: actualize called; 2026-02-19T08:14:20.839872Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=7; 2026-02-19T08:14:20.839935Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488513335;tx_id=18446744073709551615;;current_snapshot_ts=1771488812003; 2026-02-19T08:14:20.839981Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:14:20.840025Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:14:20.840062Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:14:20.840176Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:14:20.841506Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.004000s; 2026-02-19T08:14:20.845968Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:14:20.846589Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:14:20.846652Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:14:20.846739Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=7; 2026-02-19T08:14:20.846829Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488513335;tx_id=18446744073709551615;;current_snapshot_ts=1771488812003; 2026-02-19T08:14:20.846896Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:14:20.846948Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:14:20.846989Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:14:20.847067Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:189;event=skip_actualization;waiting=0.999000s; 2026-02-19T08:14:20.847122Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:14:20.848014Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.018000s; 2026-02-19T08:14:20.848065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:4172:6164];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteFilter+Covered [GOOD] Test command err: 2026-02-19T08:11:56.483979Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486101283655638:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:56.490993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:56.783991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:56.800461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:56.800575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:56.895284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:56.906217Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:56.907871Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486101283655606:2081] 1771488716480986 != 1771488716480989 2026-02-19T08:11:57.079278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:57.085176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-02-19T08:11:57.085216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:57.085224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:57.085368Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:57.501964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:57.602066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:57.656577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:57.832753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.030718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.101776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.207717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118463526663:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.207849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.217992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486118463526672:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.218122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:00.637044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.675655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.711505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.780390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.810750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.848272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.883335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:00.969441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.045633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486122758494848:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.045762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.047349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486122758494851:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.047449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.047681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486122758494855:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.051372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:01.067436Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486122758494857:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:01.132105Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486122758494908:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:01.484738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486101283655638:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:01.484803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:02.736604Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608486127053462508:3971], Recipient [1:7608486101283655961:2146]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:02.736641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026- ... adShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 8496, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710769 2026-02-19T08:14:21.493743Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:14:21.493854Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486725502113049:2563], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 8496, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:14:21.494055Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:502: UnlockPropose 281474976715675 Unlocking Transaction { WorkingDir: "/Root" OperationType: ESchemeOpDropLock LockConfig { Name: "TestTable" } LockGuard { OwnerTxId: 281474976710757 } Internal: true } TxId: 281474976710769 TabletId: 72057594046644480 FailOnExist: true 2026-02-19T08:14:21.495298Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxReply : TEvModifySchemeTransactionResult, id# 281474976715675, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710769 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:14:21.495440Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486725502113049:2563], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 8496, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 281474976715675, record: Status: StatusAccepted TxId: 281474976710769 SchemeshardId: 72057594046644480 PathId: 54, status: StatusAccepted 2026-02-19T08:14:21.496002Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:14:21.496107Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486725502113049:2563], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 8496, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:14:21.498391Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976715675, txId# 281474976710769 2026-02-19T08:14:21.498537Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486725502113049:2563], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 8496, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710769 2026-02-19T08:14:21.498839Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking 2026-02-19T08:14:21.498927Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Unlocking TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486725502113049:2563], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 8496, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:14:21.498950Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:14:21.499238Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done 2026-02-19T08:14:21.499343Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976715675 Done TBuildInfo{ IndexBuildId: 281474976715675, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 
72057594046644480, LocalPathId: 54], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, DataColumns: emb, DataColumns: data, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7608486725502113049:2563], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 111 UploadBytes: 2814 ReadRows: 90 ReadBytes: 2610 CpuTimeUs: 8496, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:14:21.499378Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976715675, subscribers count# 1 2026-02-19T08:14:21.499759Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715675 2026-02-19T08:14:21.500102Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715675 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" data_columns: "emb" data_columns: "data" global_vector_kmeans_tree_index { } } max_shards_in_flight: 32 ScanSettings { } } Progress: 100 StartTime { seconds: 1771488861 } EndTime { seconds: 1771488861 } UserSID: "" } 2026-02-19T08:14:21.505023Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037936 not found 2026-02-19T08:14:21.505049Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037937 not found 2026-02-19T08:14:21.505058Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037935 not found 2026-02-19T08:14:21.505068Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found >> PhantomBlobs::TestDisabling |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower >> KqpVectorIndexes::OrderByCosineLevel2+Nullable+UseSimilarity [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> KqpScheme::PathWithNoRoot [GOOD] >> KqpScheme::RenameTableWithVectorIndex >> KqpOlapScheme::DropTtlColumn [GOOD] 
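For orientation on the two vector-index suites in this log (KqpPrefixedVectorIndexes::PrefixedVectorIndexDeleteFilter+Covered above, and KqpVectorIndexes::OrderByCosineLevel2+Nullable+UseSimilarity whose transcript follows): the TBuildInfo records show a global vector_kmeans_tree index built over a prefix column `user` and an embedding column `emb`, covering `data`. The YQL below is a minimal sketch of that shape; the table layout, the WITH option names and values (distance/similarity, vector_type, levels, clusters), and the sample query are reconstructions from the log fields and public YDB documentation, not the tests' actual statements.

-- Hypothetical table; column names are taken from the TBuildInfo records above.
CREATE TABLE `TestTable` (
    pk Uint64,
    `user` Uint64,
    emb String,   -- packed embedding vector
    data String,
    PRIMARY KEY (pk)
);

-- Covered, prefixed vector index of the kind the first suite builds
-- (EIndexTypeGlobalVectorKmeansTree); option values here are illustrative.
ALTER TABLE `TestTable` ADD INDEX `index`
    GLOBAL USING vector_kmeans_tree
    ON (`user`, emb) COVER (data)
    WITH (distance="cosine", vector_type="uint8", vector_dimension=2, levels=2, clusters=2);

-- Nearest-neighbour read of the kind OrderByCosineLevel2+Nullable+UseSimilarity exercises,
-- using the documented Knn module to rank by cosine similarity within one prefix value.
DECLARE $target AS String;
SELECT pk, data
FROM `TestTable` VIEW `index`
WHERE `user` = 1
ORDER BY Knn::CosineSimilarity(emb, $target) DESC
LIMIT 10;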
>> KqpOlapScheme::DropColumnErrors >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> THiveTest::TestDeleteOwnerTablets [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin >> DqUnboxedValueDoNotFitToArrow::LargeVariant [GOOD] >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::InitialScanAndLimits >> DqUnboxedValueToNativeArrowConversion::ListOfJsons [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |90.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::OrderByCosineLevel2+Nullable+UseSimilarity [GOOD] Test command err: 2026-02-19T08:12:44.145910Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486308906075092:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:44.146072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:44.505799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:44.549605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:44.549779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:44.605977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:44.608396Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:44.802921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:44.803352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:44.803387Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:44.803400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:44.803530Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:45.166028Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:45.406778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:45.414727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:12:45.491496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.685082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.878332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:45.949264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:47.908022Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486321790978802:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.908144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.908449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486321790978812:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:47.908501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.246514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.283733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.320246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.356004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.386759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.417620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.487840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.534493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:48.605260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486326085946986:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.605314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.605400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486326085946991:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.605476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486326085946993:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.605521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:48.608826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:48.618546Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486326085946995:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:12:48.704370Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486326085947046:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:49.145699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486308906075092:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:49.145781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:50.266583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608486334675881943:3977], Recipient [1:7608486308906075390:2145]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:50.266626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:50 ... 2057594046644480, LocalPathId: 55] was 2 2026-02-19T08:14:25.016761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 56], at schemeshard: 72057594046644480 2026-02-19T08:14:25.016773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 55] was 1 2026-02-19T08:14:25.016782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 55], at schemeshard: 72057594046644480 2026-02-19T08:14:25.016795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 54] was 5 2026-02-19T08:14:25.017368Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486571249872369:4140], Recipient [2:7608486536890130968:2148]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037930 ClientId: [2:7608486571249872369:4140] ServerId: [2:7608486571249872377:4146] } 2026-02-19T08:14:25.017381Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:14:25.017389Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037930, from:72057594046644480 is reset 2026-02-19T08:14:25.017438Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486571249872373:4143], Recipient [2:7608486536890130968:2148]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037931 ClientId: [2:7608486571249872373:4143] ServerId: [2:7608486571249872384:4147] } 2026-02-19T08:14:25.017446Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:14:25.017452Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037931, from:72057594046644480 is reset 
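The recurring "Resource pool default not found or you don't have access permissions" warnings, followed by ESchemeOpCreateResourcePool and the "path exist, request accepts it" message for /Root/.metadata/workload_manager/pools/default, reflect the workload manager lazily creating the default pool on first use; concurrent fetchers race, and the second create is accepted because the path already exists. For reference only, a pool can also be declared explicitly; the statement below is a sketch, and the option names follow my reading of the YDB workload-management docs rather than anything issued in this run.

-- Illustrative only; not part of the test run above. Option names are assumptions.
CREATE RESOURCE POOL olap_queries WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100,
    DATABASE_LOAD_CPU_THRESHOLD = 80
);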
2026-02-19T08:14:25.017640Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2026-02-19T08:14:25.017668Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2026-02-19T08:14:25.018434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:43 2026-02-19T08:14:25.018452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:43 tabletId 72075186224037930 2026-02-19T08:14:25.018490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:44 2026-02-19T08:14:25.018508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:44 tabletId 72075186224037931 2026-02-19T08:14:25.018540Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:14:25.018642Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486743048568096:6307], Recipient [2:7608486536890130968:2148]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [2:7608486743048568096:6307] ServerId: [2:7608486743048568097:6308] } 2026-02-19T08:14:25.018660Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:14:25.018672Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset 2026-02-19T08:14:25.019066Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [2:7608486614199546831:5053], Recipient [2:7608486536890130968:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:25.019090Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:25.019117Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:14:25.019209Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [2:7608486614199546830:5052], Recipient [2:7608486536890130968:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:25.019229Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:25.019241Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:14:25.164028Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486571249872155:2543], Recipient [2:7608486536890130968:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037929 TableLocalId: 54 Generation: 1 Round: 3 TableStats { DataSize: 896 RowCount: 4 IndexSize: 0 InMemSize: 896 LastAccessTime: 1771488864904 LastUpdateTime: 1771488825467 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 54 RangeReads: 18 PartCount: 0 RangeReadRows: 54 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 709 Memory: 142616 } ShardState: 2 UserTablePartOwners: 72075186224037929 NodeId: 2 StartTime: 1771488825128 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:14:25.164075Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:25.164106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037929 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 54] state 'Ready' dataSize 896 rowCount 4 cpuUsage 0.0709 2026-02-19T08:14:25.164197Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037929 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 54] raw table stats: DataSize: 896 RowCount: 4 IndexSize: 0 InMemSize: 896 LastAccessTime: 1771488864904 LastUpdateTime: 1771488825467 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 54 RangeReads: 18 PartCount: 0 RangeReadRows: 54 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:25.164226Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.099996s, queue# 1 2026-02-19T08:14:25.164385Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486571249872152:2542], Recipient [2:7608486536890130968:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037928 TableLocalId: 54 Generation: 1 Round: 3 TableStats { DataSize: 704 RowCount: 2 IndexSize: 0 InMemSize: 704 LastAccessTime: 1771488864905 LastUpdateTime: 1771488825467 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 36 RangeReads: 18 PartCount: 0 RangeReadRows: 36 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 659 Memory: 142616 } ShardState: 2 UserTablePartOwners: 72075186224037928 NodeId: 2 StartTime: 1771488825128 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:14:25.164407Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:25.164423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037928 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 54] state 'Ready' dataSize 704 rowCount 2 cpuUsage 0.0659 2026-02-19T08:14:25.164508Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037928 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 54] raw table stats: DataSize: 704 RowCount: 2 
IndexSize: 0 InMemSize: 704 LastAccessTime: 1771488864905 LastUpdateTime: 1771488825467 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 2 RowDeletes: 0 RowReads: 36 RangeReads: 18 PartCount: 0 RangeReadRows: 36 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:25.164650Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486571249872151:2541], Recipient [2:7608486536890130968:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037927 TableLocalId: 54 Generation: 1 Round: 3 TableStats { DataSize: 896 RowCount: 4 IndexSize: 0 InMemSize: 896 LastAccessTime: 1771488864905 LastUpdateTime: 1771488825466 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 48 RangeReads: 18 PartCount: 0 RangeReadRows: 54 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 654 Memory: 142616 } ShardState: 2 UserTablePartOwners: 72075186224037927 NodeId: 2 StartTime: 1771488825128 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:14:25.164663Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:25.164677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037927 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 54] state 'Ready' dataSize 896 rowCount 4 cpuUsage 0.0654 2026-02-19T08:14:25.164767Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037927 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 54] raw table stats: DataSize: 896 RowCount: 4 IndexSize: 0 InMemSize: 896 LastAccessTime: 1771488864905 LastUpdateTime: 1771488825466 ImmediateTxCompleted: 0 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 48 RangeReads: 18 PartCount: 0 RangeReadRows: 54 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestFollowerCompatability1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:12:36.713944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:36.853588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:12:36.871204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:36.871277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:12:36.871767Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:12:37.213108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:37.213234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:37.301627Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488753090050 != 1771488753090054 2026-02-19T08:12:37.310579Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:37.357912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:37.449511Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:12:37.762848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:37.776423Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:37.876480Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:12:37.876544Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:12:37.876667Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:12:37.979271Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:12:37.979359Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:12:37.979993Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:12:37.980122Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:12:37.980460Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:12:37.980584Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:12:37.980695Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:12:37.980978Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:12:37.982549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.983568Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:12:37.983651Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:12:38.014283Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:12:38.015226Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:12:38.015525Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:12:38.015774Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:12:38.024844Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:12:38.060126Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:12:38.060263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:12:38.061989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:12:38.062097Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:12:38.062150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:12:38.062508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:12:38.062655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:12:38.062748Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:12:38.063189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:12:38.120646Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:12:38.120854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:12:38.120957Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:12:38.121018Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:12:38.121128Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:12:38.121171Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:12:38.121397Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:38.121463Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:38.121798Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:12:38.121926Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:12:38.122004Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:12:38.122054Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:38.122096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:12:38.122134Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:12:38.122189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:12:38.122226Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:12:38.122285Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:12:38.122366Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:38.122400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:38.122442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:12:38.122522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:12:38.122557Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:12:38.122658Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:12:38.122895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:12:38.122958Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:12:38.123099Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... 075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976710663 2026-02-19T08:14:25.307188Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976710663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2026-02-19T08:14:25.307230Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-02-19T08:14:25.307305Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [13:976:2830], Recipient [13:891:2770]: {TEvReadSet step# 4001 txid# 281474976710663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-02-19T08:14:25.307336Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:14:25.307366Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976710663 2026-02-19T08:14:25.307415Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 4001 txid# 281474976710663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-02-19T08:14:25.307632Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976710663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1181:2967], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:14:25.307949Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [13:891:2770], Recipient [13:976:2830]: {TEvReadSet step# 4001 txid# 281474976710663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-02-19T08:14:25.307985Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:14:25.308018Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976710663 2026-02-19T08:14:25.308071Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976710663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-02-19T08:14:25.308179Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976710663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1181:2967], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1192 } LocksBrokenAsBreaker: 0 } 
ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976710663 } 2026-02-19T08:14:25.308986Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:25.309320Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976710663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1731 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976710663 } 2026-02-19T08:14:25.311030Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:25.319255Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-02-19T08:14:25.319507Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [13:891:2770], Recipient [13:976:2830]: {TEvReadSet step# 4001 txid# 281474976710663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-02-19T08:14:25.319607Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:14:25.319685Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976710663 2026-02-19T08:14:25.322182Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-02-19T08:14:25.347025Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [13:976:2830], Recipient [13:891:2770]: {TEvReadSet step# 4001 txid# 281474976710663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-02-19T08:14:25.347101Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:14:25.347145Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976710663 2026-02-19T08:14:25.557877Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:69:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:14:25.557984Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:69:2116] TxId# 281474976710667 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2026-02-19T08:14:25.562992Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [13:1290:3075], Recipient [13:891:2770]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 
2026-02-19T08:14:25.563238Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:14:25.563340Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976710663 IncompleteEdge# v{min} UnprotectedReadEdge# v5000/18446744073709551615 ImmediateWriteEdge# v5001/0 ImmediateWriteEdgeReplied# v5001/0 2026-02-19T08:14:25.563418Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v5001/18446744073709551615 2026-02-19T08:14:25.563533Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-02-19T08:14:25.563689Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:14:25.563762Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:14:25.563833Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:14:25.563900Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:14:25.563964Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-02-19T08:14:25.564035Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:14:25.564067Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:14:25.564096Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:14:25.564123Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:14:25.564282Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-02-19T08:14:25.564727Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[13:1290:3075], 0} after executionsCount# 1 2026-02-19T08:14:25.564832Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[13:1290:3075], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:14:25.564943Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[13:1290:3075], 0} finished in read 2026-02-19T08:14:25.565048Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:14:25.565087Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:14:25.565119Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 
72075186224037888 to execution unit CompletedOperations 2026-02-19T08:14:25.565153Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:14:25.565210Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:14:25.565238Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:14:25.565275Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-02-19T08:14:25.565346Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:14:25.565535Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:14:25.567272Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [13:1290:3075], Recipient [13:891:2770]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:14:25.567365Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed |90.7%| [TA] {RESULT} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterTable |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadUncompat [GOOD] >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> Channels20::CaIntegrationTrivial [GOOD] >> Channels20::CaIntegrationAgg |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |90.7%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> TSchemeshardForcedCompactionTest::SchemeshardShouldCompactMultipleTables >> KqpScripting::LimitOnShard >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> KqpPragma::ResetPerQuery >> THiveTest::TestFollowerCompatability1 [GOOD] >> THiveTest::TestFollowerCompatability2 |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> KqpScripting::StreamExecuteYqlScriptData >> KqpOlapScheme::DropColumnErrors [GOOD] >> KqpOlapScheme::DropColumnTableStoreErrors >> KqpScheme::CreateTableWithVectorIndexPublicApi [GOOD] >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots >> KqpVectorIndexes::OrderByCosineLevel2WithCover-Nullable [GOOD] >> KqpScheme::Int8Int16Olap [GOOD] >> KqpScheme::DropTransfer >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |90.7%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> KqpYql::DdlDmlMix >> KqpYql::ScriptUdf |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |90.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpScripting::ScanQueryInvalid |90.7%| [LD] {RESULT} 
$(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |90.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity >> THiveTest::TestFollowerCompatability2 [GOOD] >> THiveTest::TestFollowerCompatability3 |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |90.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::OrderByCosineLevel2WithCover-Nullable [GOOD] Test command err: 2026-02-19T08:12:52.778849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486343920134638:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:52.778901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:53.025795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.054899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.058015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:53.125846Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486343920134599:2081] 1771488772776619 != 1771488772776622 2026-02-19T08:12:53.130740Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:53.136001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.271814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:53.326461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:53.326497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:53.326506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:53.326583Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:53.798072Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:53.806070Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:53.834688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:53.892461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.074720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.252588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.340129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:56.330757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486361100005661:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.330891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.337906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486361100005671:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.338054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.868005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:56.924412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:56.990610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.058884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.129144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.211845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.286503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.375447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.511441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486365394973838:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.511529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.511793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486365394973843:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.511843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486365394973844:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.511969Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.516295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:57.532873Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486365394973847:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:57.616054Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486365394973898:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:57.781494Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486343920134638:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:57.781613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:00.003489Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608486378279876116:3977], Recipient [1:7608486343920134952:2145]: NKikimr::TE ... hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:14:31.450672Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269551621, Sender [2:7608486608788512315:2563], Recipient [2:7608486583018705528:2151]: NKikimrTxDataShard.TEvStateChanged Source { RawX1: 7608486608788512315 RawX2: 4503608217307651 } TabletId: 72075186224037931 State: 4 2026-02-19T08:14:31.450685Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5331: StateWork, processing event TEvDataShard::TEvStateChanged 2026-02-19T08:14:31.450722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608486608788512315 RawX2: 4503608217307651 } TabletId: 72075186224037931 State: 4 2026-02-19T08:14:31.450753Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037931, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:14:31.450771Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:14:31.450899Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [2:7608486767702306148:6294], Recipient [2:7608486583018705528:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:31.450913Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:31.450921Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:14:31.451180Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877760, Sender [2:7608486767702306152:6297], Recipient [2:7608486583018705528:2151]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037968897 Status: OK ServerId: [2:7608486767702306154:6299] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-02-19T08:14:31.451197Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5394: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-02-19T08:14:31.451210Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6201: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at 
schemeshard: 72057594046644480 2026-02-19T08:14:31.451241Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:14:31.451287Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [2:7608486608788512315:2563] msg type: 269552133 msg: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046644480 State: 4 at schemeshard: 72057594046644480 2026-02-19T08:14:31.451326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:14:31.451342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:43 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:14:31.451836Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [2:7608486767702306150:6295], Recipient [2:7608486583018705528:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:31.451852Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:31.451861Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:14:31.453519Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 268698118, Sender [2:7608486583018705529:2152], Recipient [2:7608486583018705528:2151]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 44 2026-02-19T08:14:31.453545Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvHive::TEvDeleteTabletReply 2026-02-19T08:14:31.453575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 44, at schemeshard: 72057594046644480 2026-02-19T08:14:31.453766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 57] was 1 2026-02-19T08:14:31.453943Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 268698118, Sender [2:7608486583018705529:2152], Recipient [2:7608486583018705528:2151]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 43 2026-02-19T08:14:31.453960Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvHive::TEvDeleteTabletReply 2026-02-19T08:14:31.454013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 43, at schemeshard: 72057594046644480 2026-02-19T08:14:31.454180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 56] was 1 2026-02-19T08:14:31.454288Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435084, Sender [2:7608486583018705528:2151], Recipient [2:7608486583018705528:2151]: 
NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:14:31.454300Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5494: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:14:31.454316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:14:31.454327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 57], at schemeshard: 72057594046644480 2026-02-19T08:14:31.454371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 55] was 2 2026-02-19T08:14:31.454385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 56], at schemeshard: 72057594046644480 2026-02-19T08:14:31.454397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 55] was 1 2026-02-19T08:14:31.454405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 55], at schemeshard: 72057594046644480 2026-02-19T08:14:31.454416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 54] was 5 2026-02-19T08:14:31.456761Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486608788512332:4151], Recipient [2:7608486583018705528:2151]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037930 ClientId: [2:7608486608788512332:4151] ServerId: [2:7608486608788512340:4154] } 2026-02-19T08:14:31.456791Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:14:31.456799Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037930, from:72057594046644480 is reset 2026-02-19T08:14:31.456853Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486608788512337:4152], Recipient [2:7608486583018705528:2151]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037931 ClientId: [2:7608486608788512337:4152] ServerId: [2:7608486608788512342:4156] } 2026-02-19T08:14:31.456860Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:14:31.456866Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037931, from:72057594046644480 is reset 2026-02-19T08:14:31.456902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:44 2026-02-19T08:14:31.456913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:44 tabletId 72075186224037930 2026-02-19T08:14:31.456942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:43 2026-02-19T08:14:31.456953Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:43 tabletId 72075186224037931 2026-02-19T08:14:31.456973Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:14:31.457047Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2026-02-19T08:14:31.457060Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2026-02-19T08:14:31.458009Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [2:7608486767702306152:6297], Recipient [2:7608486583018705528:2151]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [2:7608486767702306152:6297] ServerId: [2:7608486767702306154:6299] } 2026-02-19T08:14:31.458028Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:14:31.458039Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset 2026-02-19T08:14:31.458311Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [2:7608486673213023786:5323], Recipient [2:7608486583018705528:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:31.458342Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:31.458355Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:14:31.458422Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [2:7608486651738186854:5086], Recipient [2:7608486583018705528:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:31.458435Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:14:31.458443Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 >> KqpVectorIndexes::OrderByCosineOnlyVectorCovered-Nullable+Overlap [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> KqpScheme::RenameTableWithVectorIndex [GOOD] >> KqpScheme::ResourcePoolClassifiersValidation >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable >> KqpYql::BinaryJsonOffsetBound ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/vector/unittest >> KqpVectorIndexes::OrderByCosineOnlyVectorCovered-Nullable+Overlap [GOOD] Test command err: 2026-02-19T08:12:52.671237Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486342051626961:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:52.671290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:12:52.730728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:53.126197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:53.126288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:53.222305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:53.232735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:12:53.290795Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:53.292563Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486342051626918:2081] 1771488772646914 != 1771488772646917 2026-02-19T08:12:53.434574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:12:53.434602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:12:53.434609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:12:53.434731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:12:53.485834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:12:53.715188Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:54.038583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:54.046714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:12:54.149094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.285310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.474000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:54.584608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:56.814336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486359231497982:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.814495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.814992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486359231497992:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:56.815064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.130237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.194347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.255858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.312868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.395760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.457815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.517555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.601389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:57.679374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486342051626961:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:57.679829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:57.736377Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486363526466161:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.736469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.736868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486363526466166:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.736893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486363526466167:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.736976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:57.740733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:57.757195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486363526466170:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:57.856927Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486363526466226:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges ... 491Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:42 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:14:33.510505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037929: SplitByLoadNotEnabledForTable 2026-02-19T08:14:33.510523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 54 shard idx 72057594046644480:41 data size 704 row count 2 2026-02-19T08:14:33.510543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037928 maps to shardIdx: 72057594046644480:41 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 54], pathId map=TestTable, is column=0, is olap=0, RowCount 2, DataSize 704 2026-02-19T08:14:33.510551Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037928, followerId 0 2026-02-19T08:14:33.510576Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:41 with partCount# 0, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:14:33.510588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037928: SplitByLoadNotEnabledForTable 2026-02-19T08:14:33.510637Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:14:33.510741Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [2:7608486574169374565:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:33.510757Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:33.510764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-02-19T08:14:33.730518Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486604234148645:2562], Recipient [2:7608486574169374565:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037930 TableLocalId: 56 Generation: 1 Round: 3 TableStats { DataSize: 294 RowCount: 6 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488871369 LastUpdateTime: 1771488834057 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 6 RowDeletes: 0 RowReads: 0 RangeReads: 46 PartCount: 1 RangeReadRows: 64 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { 
Channel: 1 DataSize: 294 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 787 Memory: 119814 Storage: 411 } ShardState: 2 UserTablePartOwners: 72075186224037930 NodeId: 2 StartTime: 1771488833702 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:14:33.730568Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:33.730607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037930 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 56] state 'Ready' dataSize 294 rowCount 6 cpuUsage 0.0787 2026-02-19T08:14:33.730721Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037930 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 56] raw table stats: DataSize: 294 RowCount: 6 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488871369 LastUpdateTime: 1771488834057 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 6 RowDeletes: 0 RowReads: 0 RangeReads: 46 PartCount: 1 RangeReadRows: 64 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 294 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:33.730746Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.099994s, queue# 1 2026-02-19T08:14:33.730954Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [2:7608486604234148646:2563], Recipient [2:7608486574169374565:2148]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037931 TableLocalId: 57 Generation: 1 Round: 3 TableStats { DataSize: 720 RowCount: 20 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771488871370 LastUpdateTime: 1771488834165 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 20 RowDeletes: 0 RowReads: 0 RangeReads: 35 PartCount: 1 RangeReadRows: 51 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 720 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 503 Memory: 119814 Storage: 837 } ShardState: 2 UserTablePartOwners: 72075186224037931 NodeId: 2 StartTime: 1771488833702 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:14:33.730971Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:33.730989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037931 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 57] state 'Ready' dataSize 720 rowCount 20 cpuUsage 0.0503 2026-02-19T08:14:33.731091Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037931 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 57] raw table stats: DataSize: 720 RowCount: 20 IndexSize: 0 InMemSize: 0 
LastAccessTime: 1771488871370 LastUpdateTime: 1771488834165 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 20 RowDeletes: 0 RowReads: 0 RangeReads: 35 PartCount: 1 RangeReadRows: 51 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 720 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:33.833991Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [2:7608486574169374565:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:33.834048Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:33.834062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2026-02-19T08:14:33.834121Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 2 2026-02-19T08:14:33.834142Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-02-19T08:14:33.834216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 56 shard idx 72057594046644480:43 data size 294 row count 6 2026-02-19T08:14:33.834270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037930 maps to shardIdx: 72057594046644480:43 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 56], pathId map=indexImplLevelTable, is column=0, is olap=0, RowCount 6, DataSize 294 2026-02-19T08:14:33.834284Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037930, followerId 0 2026-02-19T08:14:33.834361Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:43 with partCount# 1, rowCount# 6, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:14:33.834423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037930: SplitByLoadNotEnabledForTable 2026-02-19T08:14:33.834462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 57 shard idx 72057594046644480:44 data size 720 row count 20 2026-02-19T08:14:33.834488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037931 maps to shardIdx: 72057594046644480:44 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 57], pathId map=indexImplPostingTable, is column=0, is olap=0, RowCount 20, DataSize 720 2026-02-19T08:14:33.834498Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037931, followerId 0 2026-02-19T08:14:33.834535Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:44 with partCount# 1, rowCount# 20, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at 
schemeshard 72057594046644480 2026-02-19T08:14:33.834554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037931: SplitByLoadNotEnabledForTable 2026-02-19T08:14:33.834614Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:14:33.837501Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [2:7608486574169374565:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:33.837534Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:33.837553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-02-19T08:14:34.078070Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:7608486574169374565:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:14:34.078119Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:14:34.078173Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [2:7608486574169374565:2148], Recipient [2:7608486574169374565:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:14:34.078212Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 >> THiveTest::TestFollowerCompatability3 [GOOD] >> THiveTest::TestGetStorageInfo >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletReboots >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> KqpOlapScheme::DropColumnTableStoreErrors [GOOD] >> KqpOlapScheme::DropTableAfterInsert >> Cdc::ShouldBreakLocksOnConcurrentAlterTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddIndex >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> KqpScheme::CreateAndAlterTableWithPartitioningByLoadCompat [GOOD] >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> THiveTest::TestExternalBootWhenLocked |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> 
KqpScripting::StreamExecuteYqlScriptEmptyResults >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::Discard+DiscardSelectIsOn |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |90.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> KqpYql::ScriptUdf [GOOD] >> KqpYql::SelectNoAsciiValue >> THiveTest::TestExternalBootWhenLocked [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> KqpScheme::CreateTableWithVectorIndexCoveredPublicApi [GOOD] >> KqpScheme::CreateTransfer >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |90.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> KqpScheme::DropTransfer [GOOD] >> KqpScheme::DropTransfer_QueryService >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2026-02-19T08:13:39.623226Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:39.655804Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:39.658638Z node 1 :BS_NODE 
WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:39.658743Z node 1 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:0:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:39.658831Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:39.659197Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-02-19T08:13:39.660362Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:301} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DDisk# false VDiskServiceId# [1:5980451682:0] 2026-02-19T08:13:39.660425Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:39.661371Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:78:2078] ControllerId# 72057594037932033 2026-02-19T08:13:39.661411Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:39.661531Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:39.661655Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:39.675491Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:39.675562Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:39.678066Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:86:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.678228Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:87:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.678384Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:88:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.678511Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:89:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.678631Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:90:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.678736Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:91:2088] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.678883Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:92:2089] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.678916Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:39.679013Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:78:2078] 2026-02-19T08:13:39.679053Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:78:2078] 2026-02-19T08:13:39.679105Z node 1 :BS_PROXY NOTICE: 
dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:39.679172Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:39.680170Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:39.680259Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:39.683376Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:39.683847Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:39.683897Z node 2 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:1:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:39.683945Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:39.684148Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:39.685024Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:103:2077] ControllerId# 72057594037932033 2026-02-19T08:13:39.685061Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:39.685124Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:39.685246Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:39.687183Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:39.687314Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:39.687501Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:39.690479Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:39.690977Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# 
/Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:39.691023Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:2:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:39.691070Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:39.691265Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:39.692170Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:115:2077] ControllerId# 72057594037932033 2026-02-19T08:13:39.692215Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:39.692279Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:39.692383Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:39.705567Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:39.705640Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:39.707713Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:122:2081] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.707865Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:123:2082] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.707997Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:124:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.708150Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:125:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.708507Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:126:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.708642Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:127:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.710488Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:128:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.710552Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:39.710656Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:115:2077] 2026-02-19T08:13:39.710689Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:115:2077] 2026-02-19T08:13:39.710740Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:39.710785Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:39.711304Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:39.711492Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 
72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:39.724566Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:78:2078] 2026-02-19T08:13:39.724651Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:39.733804Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 ... :41.992317Z node 68 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 316b +(0, 0b), 2 trc, -0b acc} 2026-02-19T08:14:41.993045Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:388: TClient[72075186224037888] peer shutdown [68:450:2287] 2026-02-19T08:14:41.993206Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:973: Handle TEvTabletProblem tabletId: 72075186224037888 actor: [68:384:2238] entry.State: StNormal 2026-02-19T08:14:41.993309Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037927937] send [68:100:2093] 2026-02-19T08:14:41.993374Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037927937] push event to server [68:100:2093] 2026-02-19T08:14:41.993455Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:378: TClient[72075186224037888] peer closed [68:450:2287] 2026-02-19T08:14:41.993509Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72075186224037888] notify reset [68:450:2287] 2026-02-19T08:14:41.993619Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [68:99:2093] EventType# 268960257 2026-02-19T08:14:41.993680Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:973: Handle TEvTabletProblem tabletId: 72075186224037888 actor: [68:384:2238] entry.State: StNormal 2026-02-19T08:14:41.993919Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{21, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2026-02-19T08:14:41.994016Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{21, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:14:41.994177Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{21, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:14:41.994289Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{21, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:14:41.994548Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-02-19T08:14:41.994634Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:14:41.994736Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:14:41.994827Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:14:41.995317Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [68:463:2294] 2026-02-19T08:14:41.995377Z 
node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [68:463:2294] 2026-02-19T08:14:41.995534Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [68:384:2238] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:14:41.995622Z node 68 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 68 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72075186224037888 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:14:41.995818Z node 68 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:14:41.995979Z node 68 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-02-19T08:14:41.996075Z node 68 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-02-19T08:14:41.996120Z node 68 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-02-19T08:14:41.996207Z node 68 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [68:384:2238] CurrentLeaderTablet: [68:400:2249] CurrentGeneration: 1 CurrentStep: 0} 2026-02-19T08:14:41.996320Z node 68 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [68:384:2238] CurrentLeaderTablet: [68:400:2249] CurrentGeneration: 1 CurrentStep: 0} 2026-02-19T08:14:41.996413Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 72075186224037888 leader: [68:384:2238] followers: 0 2026-02-19T08:14:41.996528Z node 68 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 68 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72075186224037888 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:14:41.996637Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [68:463:2294] 2026-02-19T08:14:41.996699Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [68:463:2294] 2026-02-19T08:14:41.997170Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [69:465:2163] 2026-02-19T08:14:41.997242Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [69:465:2163] 2026-02-19T08:14:41.997320Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [69:465:2163] 2026-02-19T08:14:41.997424Z node 69 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:14:41.997512Z node 69 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: 
SelectForward node: 69 selfDC: 2 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 72057594037927937 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [68:331:2201] 2026-02-19T08:14:41.997600Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [69:465:2163] 2026-02-19T08:14:41.997671Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594037927937] forward result remote node 68 [69:465:2163] 2026-02-19T08:14:41.997882Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594037927937] remote node connected [69:465:2163] 2026-02-19T08:14:41.997962Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594037927937]::SendEvent [69:465:2163] 2026-02-19T08:14:41.998323Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [69:465:2163] 2026-02-19T08:14:41.998728Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [69:465:2163] 2026-02-19T08:14:41.998825Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [69:465:2163] 2026-02-19T08:14:41.998888Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037927937] push event to server [69:465:2163] 2026-02-19T08:14:41.999020Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594037927937]::SendEvent [69:465:2163] 2026-02-19T08:14:41.999101Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [69:465:2163] 2026-02-19T08:14:41.999159Z node 69 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [69:465:2163] 2026-02-19T08:14:41.999432Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [69:453:2158] EventType# 268697624 2026-02-19T08:14:41.999628Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2026-02-19T08:14:41.999712Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:14:41.999986Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 144b alter 0b annex 0, ~{ 1, 16 } -{ }, 0 gb} 2026-02-19T08:14:42.000081Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:14:42.022288Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [94999ccdc54a9387] bootstrap ActorId# [68:468:2297] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:127:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-02-19T08:14:42.022494Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [94999ccdc54a9387] Id# [72057594037927937:2:9:0:0:127:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:42.022592Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [94999ccdc54a9387] restore Id# [72057594037927937:2:9:0:0:127:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-02-19T08:14:42.022694Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [94999ccdc54a9387] partPlacement record 
partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:127:1] Marker# BPG33 2026-02-19T08:14:42.022768Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [94999ccdc54a9387] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:127:1] Marker# BPG32 2026-02-19T08:14:42.022980Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:58:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:127:1] FDS# 127 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-02-19T08:14:42.028607Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [94999ccdc54a9387] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:127:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 81000 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-02-19T08:14:42.028812Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [94999ccdc54a9387] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-02-19T08:14:42.028918Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [94999ccdc54a9387] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-02-19T08:14:42.029220Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:127:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-02-19T08:14:42.029402Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> KqpScripting::NoAstSizeLimit [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |90.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |90.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing >> Channels20::CaIntegrationAgg [GOOD] >> Channels20::CaIntegrationIc >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestDeleteTabletError >> KqpOlapScheme::DropTableAfterInsert [GOOD] >> KqpOlapScheme::DropColumnAndResetTtl >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::BinaryJsonOffsetNormal >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: 2026-02-19T08:14:31.476723Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486770326888864:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:31.480167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:31.807192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:31.811855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:31.868948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:31.882238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:31.895203Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:31.917862Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486770326888745:2081] 1771488871456518 != 1771488871456521 2026-02-19T08:14:32.093475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:32.118263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:32.118296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:32.118303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:32.118380Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:32.474843Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:32.742106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:32.748820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:14:32.821159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:33.030021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:33.275807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:33.362371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:35.399297Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486787506759808:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:35.399395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:35.399871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486787506759818:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:35.399923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:35.775893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:35.809340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:35.842694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:35.902083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:35.933489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:35.975926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.015618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.072124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.168930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486791801727985:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.169031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.169347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486791801727990:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.169390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486791801727991:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.169499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.174452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:36.193600Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486791801727994:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:36.279784Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486791801728045:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:36.472493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486770326888864:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:36.472576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:38.282013Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771488878306, txId: 281474976710673] shutting down 2026-02-19T08:14:39.328886Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486804206247574:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:39.329095Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:39.370411Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:39.510154Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:39.510286Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:39.510926Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:39.517849Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486804206247527:2081] 1771488879327672 != 1771488879327675 2026-02-19T08:14:39.583932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:39.682447Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:39.822371Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:39.822396Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:39.822404Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:39.822484Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:40.335207Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:40.351220Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:43.648971Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486821386117611:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.649411Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.650005Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486821386117620:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.650064Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.667358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:43.778683Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486821386117717:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.778761Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.779112Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486821386117719:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.779152Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.848089Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486821386117729:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.848181Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.848705Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486821386117734:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.848752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486821386117735:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.848917Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.853979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:43.872228Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486821386117738:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:14:43.964286Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486821386117789:2606] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScheme::ResourcePoolClassifiersValidation [GOOD] >> KqpScheme::ResourcePoolClassifiersRankValidation |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |90.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:12:35.997785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:36.108846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:12:36.131354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:36.131436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:12:36.135890Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:12:36.412874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:36.413016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:36.490669Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488752771616 != 1771488752771620 2026-02-19T08:12:36.499280Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:36.546049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:36.705668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:12:37.036641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:37.055443Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:37.203660Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:12:37.203784Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:12:37.203933Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:12:37.335005Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:12:37.335118Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:12:37.335887Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:12:37.336028Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:12:37.336445Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:12:37.336611Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:12:37.336755Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:12:37.337096Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:12:37.338703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:37.339717Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:12:37.339787Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:12:37.371357Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:12:37.372383Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:12:37.372653Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:12:37.372893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:12:37.381773Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:12:37.412615Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:12:37.412772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:12:37.414560Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:12:37.414663Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:12:37.414723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:12:37.415104Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:12:37.415247Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:12:37.415334Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:12:37.415891Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:12:37.487873Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:12:37.488105Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:12:37.488212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:12:37.488242Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:12:37.488267Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:12:37.488353Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:12:37.488551Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:37.488592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:37.488902Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:12:37.488989Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:12:37.489055Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:12:37.489095Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:37.489132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:12:37.489160Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:12:37.489189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:12:37.489220Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:12:37.489255Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:12:37.489307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:37.489334Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:37.489367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:12:37.489445Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:12:37.489504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:12:37.489629Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:12:37.489919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:12:37.489985Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:12:37.490077Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... 8:14:45.690523Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:14:45.690580Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:14:45.690645Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:14:45.690691Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:14:45.690736Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-02-19T08:14:45.690847Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:14:45.690930Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:14:45.691016Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2026-02-19T08:14:45.691215Z node 16 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-02-19T08:14:45.691329Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:45.691685Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:981: SelfId: [16:1111:2879], Table: `/Root/table` ([72057594046644480:38:1]), SessionActorId: [16:1050:2879]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:1111:2879].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2026-02-19T08:14:45.691979Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [16:1105:2879], SessionActorId: [16:1050:2879], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:1050:2879]. 2026-02-19T08:14:45.692484Z node 16 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=16&id=MTk4YjExMC00NDlkN2ZjMC1jNjM4YTM3YS03Zjk1MTM0ZA==, ActorId: [16:1050:2879], ActorState: ExecuteState, LegacyTraceId: 01khtfeej38pdbwcgny8tkpcy1, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:1106:2879] from: [16:1105:2879] trace_id# 2026-02-19T08:14:45.692690Z node 16 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [16:1106:2879] TxId: 281474976710663. Ctx: { TraceId: 01khtfeej38pdbwcgny8tkpcy1, Database: , SessionId: ydb://session/3?node_id=16&id=MTk4YjExMC00NDlkN2ZjMC1jNjM4YTM3YS03Zjk1MTM0ZA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# [ {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 } {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } ] trace_id# 2026-02-19T08:14:45.693329Z node 16 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=16&id=MTk4YjExMC00NDlkN2ZjMC1jNjM4YTM3YS03Zjk1MTM0ZA==, ActorId: [16:1050:2879], ActorState: ExecuteState, LegacyTraceId: 01khtfeej38pdbwcgny8tkpcy1, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table`." issue_code: 2001 severity: 1 }{ message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } trace_id# 2026-02-19T08:14:45.694820Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 278003712, Sender [16:1105:2879], Recipient [16:899:2777]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976710661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-02-19T08:14:45.694866Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-02-19T08:14:45.694992Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435074, Sender [16:899:2777], Recipient [16:899:2777]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:14:45.695026Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3243: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:14:45.695093Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-02-19T08:14:45.695207Z node 16 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976710661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-02-19T08:14:45.695331Z node 16 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976710661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-02-19T08:14:45.695431Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-02-19T08:14:45.695475Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:14:45.695504Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-02-19T08:14:45.695534Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:14:45.695564Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:14:45.695611Z node 16 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v501/0 ImmediateWriteEdgeReplied# v501/0 2026-02-19T08:14:45.695683Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-02-19T08:14:45.695717Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 
2026-02-19T08:14:45.695746Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:14:45.695773Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-02-19T08:14:45.695799Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-02-19T08:14:45.695825Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:14:45.695848Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-02-19T08:14:45.695871Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-02-19T08:14:45.695896Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-02-19T08:14:45.695946Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:6] at 72075186224037888 2026-02-19T08:14:45.696070Z node 16 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976710661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-02-19T08:14:45.696144Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:561: Skip empty write operation for [0:6] at 72075186224037888 2026-02-19T08:14:45.696265Z node 16 :TX_DATASHARD TRACE: execute_write_unit.cpp:110: add locks to result: 0 2026-02-19T08:14:45.696368Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:14:45.696420Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-02-19T08:14:45.696482Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-02-19T08:14:45.696538Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:14:45.696590Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-02-19T08:14:45.696621Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-02-19T08:14:45.696650Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:14:45.696676Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:14:45.696725Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:14:45.696752Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:14:45.696779Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-02-19T08:14:45.696840Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: 
TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:14:45.696868Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:14:45.696902Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-02-19T08:14:45.696973Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:45.699811Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [16:71:2118], Recipient [16:899:2777]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710661 LockNode: 16 Status: STATUS_NOT_FOUND 2026-02-19T08:14:45.704265Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [16:1119:2929], Recipient [16:899:2777]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:45.704388Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:45.704464Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [16:1118:2928], serverId# [16:1119:2929], sessionId# [0:0:0] 2026-02-19T08:14:45.704711Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553224, Sender [16:834:2728], Recipient [16:899:2777]: NKikimr::TEvDataShard::TEvGetOpenTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:12:37.017258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:37.166473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:12:37.188379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:37.188467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:12:37.189021Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:12:37.483473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:37.483622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:37.552373Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488753601263 != 1771488753601267 2026-02-19T08:12:37.560719Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:37.607890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:37.711070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:12:38.000854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:38.016221Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:38.119981Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:12:38.120058Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:12:38.120166Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:12:38.283792Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:12:38.283887Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:12:38.284589Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:12:38.284704Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:12:38.285076Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:12:38.285212Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:12:38.285349Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:12:38.285660Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:12:38.287486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.288637Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:12:38.288730Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:12:38.324845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:12:38.326047Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:12:38.326368Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:12:38.326640Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:12:38.344469Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:12:38.385733Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:12:38.385926Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:12:38.387895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:12:38.387988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:12:38.388049Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:12:38.388457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:12:38.388607Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:12:38.388754Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:12:38.389316Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:12:38.453053Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:12:38.453288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:12:38.453406Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:12:38.453445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:12:38.453483Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:12:38.453576Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:12:38.453858Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:38.453926Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:38.454300Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:12:38.454424Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:12:38.454522Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:12:38.454571Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:38.454616Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:12:38.454651Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:12:38.454851Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:12:38.454892Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:12:38.454956Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:12:38.455041Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:38.455077Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:38.455117Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:12:38.455200Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:12:38.455250Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:12:38.455360Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:12:38.455605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:12:38.455657Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:12:38.455757Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... node 13 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2026-02-19T08:14:43.041128Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1272:3073]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-02-19T08:14:43.751518Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:69:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:14:43.751644Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:69:2116] TxId# 281474976710671 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2026-02-19T08:14:43.755870Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [13:1844:3548], Recipient [13:999:2854]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2026-02-19T08:14:43.756166Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-02-19T08:14:43.756268Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037889 CompleteEdge# v9001/281474976710670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:14:43.756350Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037889 changed HEAD read to non-repeatable v10000/18446744073709551615 2026-02-19T08:14:43.756471Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-02-19T08:14:43.756705Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-02-19T08:14:43.756790Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-02-19T08:14:43.756874Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-02-19T08:14:43.756935Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-02-19T08:14:43.757001Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-02-19T08:14:43.757075Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-02-19T08:14:43.757109Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-02-19T08:14:43.757135Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-02-19T08:14:43.757162Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to 
execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-02-19T08:14:43.757322Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-02-19T08:14:43.757837Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037889 Complete read# {[13:1844:3548], 0} after executionsCount# 1 2026-02-19T08:14:43.757963Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037889 read iterator# {[13:1844:3548], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-02-19T08:14:43.758119Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037889 read iterator# {[13:1844:3548], 0} finished in read 2026-02-19T08:14:43.758257Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-02-19T08:14:43.758292Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-02-19T08:14:43.758325Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-02-19T08:14:43.758355Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-02-19T08:14:43.758411Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-02-19T08:14:43.758441Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-02-19T08:14:43.758478Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-02-19T08:14:43.758560Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-02-19T08:14:43.758752Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-02-19T08:14:43.760869Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [13:1844:3548], Recipient [13:999:2854]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:14:43.760974Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2026-02-19T08:14:44.373065Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:69:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:14:44.373173Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:69:2116] TxId# 281474976710672 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2026-02-19T08:14:44.376992Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [13:1873:3573], Recipient [13:1272:3073]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 
72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2026-02-19T08:14:44.377328Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2026-02-19T08:14:44.377424Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037891 CompleteEdge# v9001/281474976710670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:14:44.377498Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037891 changed HEAD read to non-repeatable v10000/18446744073709551615 2026-02-19T08:14:44.377618Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2026-02-19T08:14:44.377856Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-02-19T08:14:44.377937Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2026-02-19T08:14:44.378009Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2026-02-19T08:14:44.378075Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2026-02-19T08:14:44.378140Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037891 2026-02-19T08:14:44.378198Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-02-19T08:14:44.378241Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2026-02-19T08:14:44.378265Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2026-02-19T08:14:44.378289Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2026-02-19T08:14:44.378441Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-02-19T08:14:44.378865Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037891 Complete read# {[13:1873:3573], 0} after executionsCount# 1 2026-02-19T08:14:44.378975Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037891 read iterator# {[13:1873:3573], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-02-19T08:14:44.379101Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037891 read iterator# {[13:1873:3573], 0} finished in read 2026-02-19T08:14:44.379219Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-02-19T08:14:44.379265Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit 
ExecuteRead 2026-02-19T08:14:44.379297Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2026-02-19T08:14:44.379329Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2026-02-19T08:14:44.379388Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-02-19T08:14:44.379411Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2026-02-19T08:14:44.379444Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037891 has finished 2026-02-19T08:14:44.379521Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2026-02-19T08:14:44.379687Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2026-02-19T08:14:44.380567Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [13:1873:3573], Recipient [13:1272:3073]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:14:44.380664Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } >> KqpPragma::Warning [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpYql::Discard+DiscardSelectIsOn [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> THiveTest::TestDeleteTabletError [GOOD] >> THiveTest::TestDeleteTabletWithRestartAndRetry |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: 2026-02-19T08:14:31.994493Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486769811477557:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:31.994639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:32.295505Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:32.337154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:32.337265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:32.387086Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:32.400201Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486769811477325:2081] 1771488871960957 != 1771488871960960 2026-02-19T08:14:32.410312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:32.518492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:32.561274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:32.561303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:32.561310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:32.562218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:32.990089Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:33.157101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:33.239149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:33.402307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:33.589868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:33.670755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:35.804003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486786991348397:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:35.804113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:35.804674Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486786991348407:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:35.804731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.251908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.298124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.365314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.410533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.453849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.498591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.535682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.589984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.682058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486791286316575:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.682134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.682634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486791286316580:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.682670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486791286316581:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.682788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.687555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:36.700751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486791286316584:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:36.767543Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486791286316635:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:36.994694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486769811477557:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:36.994774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:39.183700Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608486804171218875:2552], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial, At funct ... tion { row: 2 column: 20 } message: "At function: PersistableRepr" end_position { row: 2 column: 20 } severity: 1 issues { position { row: 2 column: 26 } message: "At function: Member" end_position { row: 2 column: 26 } severity: 1 issues { position { row: 2 column: 26 } message: "Member not found: _yql_partition_id" end_position { row: 2 column: 26 } severity: 1 } } } } } } } } } } tx_id# trace_id# 2026-02-19T08:14:40.323443Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486807668536693:2177];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:40.323510Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:40.359513Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:40.506220Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:40.506301Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:40.537227Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:40.539337Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:40.541157Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486807668536549:2081] 1771488880286285 != 1771488880286288 2026-02-19T08:14:40.664072Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:40.714435Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:40.714459Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:40.714469Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:40.714568Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:41.341920Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:41.609275Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:41.625693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:41.721012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:41.923351Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.033911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:44.887018Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486824848407602:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:44.887121Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:44.887379Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486824848407612:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:44.887435Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:44.995244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.038354Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.098720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.160360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.208204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.261699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.324566Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486807668536693:2177];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:45.324637Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:45.325601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.428139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.569656Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486829143375785:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.569780Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.570163Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486829143375790:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.570206Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486829143375791:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.570328Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.577752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:45.591658Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486829143375794:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:45.672319Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486829143375845:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Cdc::ShouldBreakLocksOnConcurrentAddIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddStream >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> KqpScripting::ScanQueryTruncate [GOOD] |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |90.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: 2026-02-19T08:14:32.556966Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486771911340627:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:32.557014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:32.978129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:32.978290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:33.006397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:33.026988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:33.029764Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:33.030842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486771911340430:2081] 1771488872502089 != 1771488872502092 2026-02-19T08:14:33.217117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:33.326421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:33.326449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:33.326457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:33.326527Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-02-19T08:14:33.562137Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:33.863773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:33.885331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:14:33.937320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:34.125119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:34.324965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:34.405240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.533191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486789091211492:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.533312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.534407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486789091211502:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.534533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:36.873197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.920458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.957617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.996367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:37.055704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:37.093156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:37.135373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:37.195483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:37.340581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486793386179668:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:37.340686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:37.341070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486793386179673:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:37.341103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486793386179674:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:37.341213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:37.346086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:37.364597Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486793386179677:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:37.456259Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486793386179728:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:37.557960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486771911340627:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:37.558039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:39.911085Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771488879916, txId: 281474976710673] shutting down 2026-02-19T08:14:41.361813Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:41.362056Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:14:41.379772Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:41.389890Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486812482047795:2081] 1771488881127849 != 1771488881127852 2026-02-19T08:14:41.419171Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:41.419254Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:41.447073Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:41.654370Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:41.658912Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:41.658934Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:41.658941Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:41.659020Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:42.148748Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:42.219140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:42.226891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:14:42.235650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.305164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.483369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.604806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.139780Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486829661918858:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.139862Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.140305Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486829661918868:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.140345Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.291834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.341418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.410070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.474702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.536529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.639906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.734415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.888734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.104851Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486833956887043:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.104964Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.105479Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486833956887048:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.105526Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486833956887049:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.105624Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.110601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:46.156576Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486833956887052:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:46.241387Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486833956887106:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:49.090760Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771488889121, txId: 281474976710673] shutting down 2026-02-19T08:14:49.255196Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771488889268, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard+DiscardSelectIsOn [GOOD] Test command err: 2026-02-19T08:14:34.402357Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486780702558640:2184];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:34.404342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:34.658636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:34.684808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:34.684907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:34.757195Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486780702558484:2081] 1771488874385765 != 1771488874385768 2026-02-19T08:14:34.771795Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:34.778166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:34.985386Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:35.006697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:35.006717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:35.006723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:35.006855Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:35.411558Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:35.574741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:35.583061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:14:35.637810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:35.834301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.026854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.113325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.065023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486797882429547:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.065114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.065474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486797882429557:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.065518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.410656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.467206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.513457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.559133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.595103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.647691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.701371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.767941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.862627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486797882430434:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.862712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.863063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486797882430439:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.863121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486797882430440:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.863236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.867691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:38.882519Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486797882430443:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:38.962274Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486797882430494:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:39.399855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486780702558640:2184];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:39.399959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2026-02-19T08:14:41.837530Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486812120459232:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:41.837576Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:41.907575Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:41.984652Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:41.986272Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:41.986371Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:41.993779Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486812120459202:2081] 1771488881834444 != 1771488881834447 2026-02-19T08:14:42.016543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:42.135835Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:42.193963Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:42.193986Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:42.193993Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:42.194072Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:42.665485Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:42.681023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:14:42.693373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.778322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.918937Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:42.997957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:43.106126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.450033Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486829300330254:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.450114Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.450502Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486829300330264:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.450541Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:45.512343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.565859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.608635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.659910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.711158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.778605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.838641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.969044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.145749Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486833595298431:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.147234Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.147964Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486833595298436:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.148023Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486833595298437:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.148141Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.152866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:46.163863Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486833595298440:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:14:46.222120Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486833595298491:3756] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:46.845840Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486812120459232:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:46.845930Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpYql::SelectNoAsciiValue [GOOD] >> KqpIndexes::DuplicateUpsertInterleave-UseStreamIndex |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] Test command err: 2026-02-19T08:11:21.830926Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485950575224632:2067];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:21.830994Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:22.321823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:22.349452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:22.349526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:22.491041Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485950575224605:2081] 1771488681829031 != 1771488681829034 2026-02-19T08:11:22.506164Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:22.511269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:22.521831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:22.715649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:22.715684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:22.715694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:22.715768Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:22.749060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:22.756450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:22.778268Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7608485954870192745:2300] 2026-02-19T08:11:22.778517Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:22.794082Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:22.794151Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:22.795942Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:22.795999Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:22.796067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:22.796383Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:22.796433Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:22.796479Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:7608485954870192760:2300] in generation 1 2026-02-19T08:11:22.802297Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:22.855351Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:22.855489Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:22.855550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:7608485954870192762:2301] 2026-02-19T08:11:22.855567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:22.855579Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:22.855614Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:22.855941Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:22.856055Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:22.856118Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:22.856161Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:7608485954870192743:2499], serverId# [1:7608485954870192747:2500], sessionId# [0:0:0] 2026-02-19T08:11:22.856194Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:22.856207Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:22.856235Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:22.856249Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:22.856324Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:22.856754Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:22.856837Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:22.861838Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:22.864814Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:22.865019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:22.867554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:7608485954870192785:2518], serverId# [1:7608485954870192787:2520], sessionId# [0:0:0] 2026-02-19T08:11:22.874045Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1771488682915 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488682915 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:22.874073Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:22.874198Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:22.874262Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:22.874281Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:22.874301Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1771488682915:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:22.874644Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1771488682915:281474976715657 keys extracted: 0 2026-02-19T08:11:22.874781Z node 1 :TX_DATASHARD 
DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:22.874849Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:22.874873Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:22.877525Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:22.878253Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:22.879964Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1771488682914 2026-02-19T08:11:22.879978Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:22.880007Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1771488682915} 2026-02-19T08:11:22.880054Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:22.880081Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:22.880093Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:22.880145Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:22.880182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1771488682915 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7608485954870192258:2146], exec latency: 3 ms, propose latency: 5 ms 2026-02-19T08:11:22.880203Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:22.880252Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:22.880391Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1771488682922 2026-02-19T ... 
3: Complete [4500 : 281474976710759] from 72075186224037891 at tablet 72075186224037891 send result to client [25:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:14:48.093775Z node 25 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037891 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2026-02-19T08:14:48.093959Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-02-19T08:14:48.097586Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037891 state Ready 2026-02-19T08:14:48.110079Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2026-02-19T08:14:48.147589Z node 25 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 4500} 2026-02-19T08:14:48.147708Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:14:48.147754Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:14:48.147846Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [4500 : 281474976710759] from 72075186224037888 at tablet 72075186224037888 send result to client [25:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:14:48.147909Z node 25 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2026-02-19T08:14:48.147995Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:48.151497Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037888 state Ready 2026-02-19T08:14:48.151606Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:14:48.206090Z node 25 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:14:48.206183Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.206235Z node 25 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:14:48.206283Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.206318Z node 25 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:14:48.251745Z node 25 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:14:48.251833Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.251875Z node 25 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:14:48.251926Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: 
[72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.251964Z node 25 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:14:48.285518Z node 25 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:14:48.285613Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.285657Z node 25 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:14:48.285702Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.285756Z node 25 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:14:48.323683Z node 25 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:14:48.323779Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.323819Z node 25 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:14:48.323863Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.323900Z node 25 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:14:48.360076Z node 25 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:14:48.360152Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.360191Z node 25 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:14:48.360255Z node 25 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:14:48.360297Z node 25 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:14:48.608745Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:8] at 72075186224037888 2026-02-19T08:14:48.608978Z node 25 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8; 2026-02-19T08:14:48.609131Z node 25 :TX_DATASHARD INFO: datashard_write_operation.cpp:808: Write transaction 8 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2026-02-19T08:14:48.609503Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. 
txid 8 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-02-19T08:14:48.609892Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 8 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-02-19T08:14:48.610025Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:48.610506Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:981: SelfId: [25:1406:3077], Table: `/Root/Table` ([72057594046644480:38:3]), SessionActorId: [25:1350:3077]Got LOCKS BROKEN for table `/Root/Table`. ShardID=72075186224037888, Sink=[25:1406:3077].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-02-19T08:14:48.610889Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [25:1400:3077], SessionActorId: [25:1350:3077], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[25:1350:3077]. 2026-02-19T08:14:48.611496Z node 25 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=25&id=ODk2YWVmYzAtMmFmMmVkMmQtOWNiMmZjNGQtODc1NmQ3OTI=, ActorId: [25:1350:3077], ActorState: ExecuteState, LegacyTraceId: 01khtfehhjd8tb9yfeb5g1e6j5, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [25:1486:3077] from: [25:1400:3077] trace_id# 2026-02-19T08:14:48.611832Z node 25 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [25:1486:3077] TxId: 281474976715665. Ctx: { TraceId: 01khtfehhjd8tb9yfeb5g1e6j5, Database: , SessionId: ydb://session/3?node_id=25&id=ODk2YWVmYzAtMmFmMmVkMmQtOWNiMmZjNGQtODc1NmQ3OTI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# [ {
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001 } {
: Error: Operation is aborting because locks are not valid, code: 2001 } ] trace_id# 2026-02-19T08:14:48.612659Z node 25 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=25&id=ODk2YWVmYzAtMmFmMmVkMmQtOWNiMmZjNGQtODc1NmQ3OTI=, ActorId: [25:1350:3077], ActorState: ExecuteState, LegacyTraceId: 01khtfehhjd8tb9yfeb5g1e6j5, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Table`." issue_code: 2001 severity: 1 }{ message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } trace_id# 2026-02-19T08:14:48.614262Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:9] at 72075186224037888 2026-02-19T08:14:48.614371Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:561: Skip empty write operation for [0:9] at 72075186224037888 2026-02-19T08:14:48.614709Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-02-19T08:14:48.620207Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'streamImpl' requestId: 2026-02-19T08:14:48.620384Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-02-19T08:14:48.621540Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-02-19T08:14:48.622276Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 139 count 1 last offset 0, current partition end offset: 1 2026-02-19T08:14:48.622412Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-02-19T08:14:48.622622Z node 25 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 139 accessed 0 times before, last time 1970-01-01T00:00:04.000000Z 2026-02-19T08:14:48.622833Z node 25 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2026-02-19T08:14:48.623036Z node 25 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:14:48.623254Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-02-19T08:14:48.623609Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 121 from pos 0 cbcount 1 2026-02-19T08:14:48.624494Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat [GOOD] >> THiveTest::TestDeleteTabletWithRestartAndRetry [GOOD] >> THiveTest::TestCreateTabletChangeToExternal |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> KqpScripting::JoinIndexLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: 2026-02-19T08:14:34.708142Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486780096679916:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:34.713780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:34.805872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:14:35.228160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:35.228268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:35.285761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:35.321887Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486780096679683:2081] 1771488874662870 != 1771488874662873 2026-02-19T08:14:35.349346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:35.364370Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:35.483832Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:35.483852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:35.483862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:35.483949Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:35.579736Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:35.713832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:35.971033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:35.978401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:14:36.047099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.188440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.359454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.450042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.793176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486797276550755:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.793329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.793790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486797276550765:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.793866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.167448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.211332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.268752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.367373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.408754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.464010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.516185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.586606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.703511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486801571518933:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.703572Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.703842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486801571518938:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.703886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486801571518939:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.703970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.707168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:39.708008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486780096679916:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:39.708136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:39.719605Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486801571518942:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:14:39.800852Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486801571518998:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges ... T08:14:43.575251Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:43.589243Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:43.593895Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486821795993145:2081] 1771488883367642 != 1771488883367645 2026-02-19T08:14:43.607913Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:43.698383Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:43.698404Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:43.698412Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:43.698489Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:43.769836Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:44.182380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:44.203566Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:14:44.232240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:44.348990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:44.358978Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:44.511070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:44.597596Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:47.903181Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486838975864202:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.903275Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.903765Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486838975864212:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.903832Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.969778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.037881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.090700Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.137867Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.187961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.251690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.308442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.362512Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.402127Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486821795993197:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:48.414587Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:48.472920Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486843270832377:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.473006Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.473324Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486843270832382:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.473380Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486843270832383:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.473471Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.478053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:48.488805Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486843270832386:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:48.588920Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486843270832438:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:50.671230Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7608486851860767437:2077], tablet: [2:7608486826090961482:2320], scanId: 2, table: /Root/EightShard 2026-02-19T08:14:50.671296Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7608486851860767439:2078], tablet: [2:7608486826090961480:2319], scanId: 1, table: /Root/EightShard 2026-02-19T08:14:50.671334Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7608486851860767441:2079], tablet: [2:7608486826090961476:2317], scanId: 4, table: /Root/EightShard 2026-02-19T08:14:50.671363Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7608486851860767443:2080], tablet: [2:7608486826090961484:2321], scanId: 3, table: /Root/EightShard 2026-02-19T08:14:50.686060Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771488890682, txId: 281474976710673] shutting down |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> VDiskBalancing::TestRandom_Mirror3dc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: 2026-02-19T08:14:34.569657Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486782027518096:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:34.569704Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:34.977695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:34.977817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:35.114846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:35.169879Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486782027517974:2081] 1771488874530972 != 1771488874530975 2026-02-19T08:14:35.179592Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:35.191045Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:35.416000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:35.416021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:35.416028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:35.416101Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:35.485098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:35.644552Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:35.993655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:36.056198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.215416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.386689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.459610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.616411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486799207389050:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.616583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.617423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486799207389062:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.617483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.006750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.043330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.079366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.120538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.181666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.241487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.298907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.367807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.473866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486803502357228:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.473960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.474304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486803502357233:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.474344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486803502357234:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.474454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.479164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:39.500977Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486803502357237:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:39.576720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486782027518096:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:39.576827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:39.579247Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486803502357290:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script ... ;send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:42.498310Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:42.685823Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:42.687802Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:42.687895Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:42.703907Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486815492403508:2081] 1771488882496860 != 1771488882496863 2026-02-19T08:14:42.712103Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:42.724048Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:42.906294Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:42.906317Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:42.906323Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:42.906397Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:42.991888Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:43.526254Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:43.544153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:43.571861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:43.670892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:43.964352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:44.079602Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:47.505845Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486815492403549:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:47.505931Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:47.882309Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486836967241855:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.882400Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.882815Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486836967241865:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.882890Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.116038Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.148868Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.192230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.237550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.322546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.400160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.475279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.549417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.667670Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486841262210042:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.667835Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.668133Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486841262210047:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.668188Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486841262210048:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.668225Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:48.672504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:48.683709Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486841262210051:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:48.825428Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486841262210102:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:50.723395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.171281Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771488891207, txId: 281474976710675] shutting down >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 |90.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardForcedCompactionTest::SchemeshardShouldCompactMultipleTables Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:19.066900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:19.066996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:19.067092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:19.067132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:19.067165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:19.067205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:19.067250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:19.067277Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:19.067342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:19.067423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:19.068305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:19.068603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:19.212434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:19.212495Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:19.235706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:19.236412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:19.236638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:19.251751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:19.252513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:19.253250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:19.253579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:19.257022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:19.257210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:19.258526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:19.258612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:19.258721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:19.258769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:19.258808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:19.258977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:19.434273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.435372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.435520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.435606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.435739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.435830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.435926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.436035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.436141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.436231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.436292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.436351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.436440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:19.436541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ext&) /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1882:9 #24 0x000017af07c7 in operator() /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1791:1 #25 0x000017af07c7 in __invoke<(lambda at /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1791:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:179:25 #26 0x000017af07c7 in __call<(lambda at /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1791:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:251:5 #27 0x000017af07c7 in __invoke_r /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:273:10 #28 0x000017af07c7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:167:12 #29 0x000017af07c7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:319:10 #30 0x0000183b91d9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:436:12 #31 0x0000183b91d9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:995:10 #32 0x0000183b91d9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:527:20 #33 0x000018391cc7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:378:18 #34 0x000017aefadc in NKikimr::NSchemeShard::NTestSuiteTSchemeshardForcedCompactionTest::TCurrentTest::Execute() /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1791:1 #35 0x00001839347f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:499:19 #36 0x0000183b32ec in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:895:44 #37 0x7f2825852d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) previously allocated by thread T0 here: #0 0x000017bd8acd in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x00002cb63ffe in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/__new/allocate.h:37:10 #2 0x00002cb63ffe in __libcpp_allocate<__yhashtable_node::TItem> > /-S/contrib/libs/cxxsupp/libcxx/include/__new/allocate.h:64:28 #3 0x00002cb63ffe in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:105:14 #4 0x00002cb63ffe in get_node /-S/util/generic/hash_table.h:497:43 #5 0x00002cb63ffe in new_node /-S/util/generic/hash_table.h:947:19 #6 0x00002cb63ffe in __yhashtable_iterator::TItem> THashTable::TItem, NKikimr::TFifoQueue::TItem, NKikimr::TFifoQueue::TItem::THash, TIdentity, NKikimr::TFifoQueue::TItem::TEqual, std::__y1::allocator::TItem>>::emplace_direct(__yhashtable_node::TItem>**, NKikimr::TPathId&) /-S/util/generic/hash_table.h:697:21 #7 0x00002cb63e29 in emplace_direct /-S/util/generic/hash_set.h:192:20 #8 0x00002cb63e29 in bool NKikimr::TFifoQueue::PushBack(NKikimr::TPathId&) /-S/ydb/core/util/circular_queue.h:74:30 #9 0x00002cb5b8e4 in Enqueue /-S/ydb/core/util/circular_queue.h:62:16 #10 0x00002cb5b8e4 in NKikimr::NSchemeShard::TSchemeShard::AddForcedCompaction(TIntrusivePtr> const&) /-S/ydb/core/tx/schemeshard/schemeshard_forced_compaction.cpp:13:37 #11 0x00002cb776c7 in NKikimr::NSchemeShard::TSchemeShard::TForcedCompaction::TTxCreate::DoExecute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/schemeshard/schemeshard_forced_compaction__create.cpp:111:15 #12 
0x00002b4229a8 in NKikimr::NSchemeShard::TSchemeShard::TRwTxBase::Execute(NKikimr::NTabletFlatExecutor::TTransactionContext&, NActors::TActorContext const&) /-S/ydb/core/tx/schemeshard/schemeshard_impl.cpp:1866:9 #13 0x00001db68670 in NKikimr::NTabletFlatExecutor::TExecutor::ExecuteTransaction(NKikimr::NTabletFlatExecutor::TSeat*) /-S/ydb/core/tablet_flat/flat_executor.cpp:2036:35 #14 0x00001db629ca in NKikimr::NTabletFlatExecutor::TExecutor::DoExecute(TAutoPtr, NKikimr::NTabletFlatExecutor::TExecutor::ETxMode) /-S/ydb/core/tablet_flat/flat_executor.cpp:1950:13 #15 0x00001db6b29e in NKikimr::NTabletFlatExecutor::TExecutor::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/flat_executor.cpp:1964:5 #16 0x00001dafde6a in Execute /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:62:46 #17 0x00001dafde6a in NKikimr::NTabletFlatExecutor::TTabletExecutedFlat::Execute(TAutoPtr, NActors::TActorContext const&) /-S/ydb/core/tablet_flat/tablet_flat_executed.cpp:57:5 #18 0x00002cb60057 in NKikimr::NSchemeShard::TSchemeShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/schemeshard/schemeshard_forced_compaction.cpp:137:5 #19 0x00002b3f2bd4 in NKikimr::NSchemeShard::TSchemeShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/schemeshard/schemeshard_impl.cpp:5461:9 #20 0x00001935fa97 in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:350:17 #21 0x0000365fae77 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1722:33 #22 0x0000365f3492 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1311:45 #23 0x0000365fda93 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1570:22 #24 0x00003aecde7c in NKikimr::NSchemeShard::TEvForcedCompaction::TEvCreateResponse* NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TAutoPtr&, std::__y1::function, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:475:13 #25 0x00003ae58c77 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:539:20 #26 0x00003ae58c77 in NSchemeShardUT_Private::TestCompact(NActors::TTestActorRuntime&, unsigned long, unsigned long, TBasicString> const&, TBasicString> const&, Ydb::StatusIds_StatusCode) /-S/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp:2255:27 #27 0x000017ac16a7 in NKikimr::NSchemeShard::NTestSuiteTSchemeshardForcedCompactionTest::TTestCaseSchemeshardShouldCompactMultipleTables::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1882:9 #28 0x000017af07c7 in operator() /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1791:1 #29 0x000017af07c7 in __invoke<(lambda at /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1791:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:179:25 #30 0x000017af07c7 in __call<(lambda at /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1791:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:251:5 #31 0x000017af07c7 in __invoke_r /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:273:10 #32 0x000017af07c7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:167:12 #33 0x000017af07c7 in std::__y1::__function::__func, void ()>::operator()() 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:319:10 #34 0x0000183b91d9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:436:12 #35 0x0000183b91d9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:995:10 #36 0x0000183b91d9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:527:20 #37 0x000018391cc7 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:378:18 #38 0x000017aefadc in NKikimr::NSchemeShard::NTestSuiteTSchemeshardForcedCompactionTest::TCurrentTest::Execute() /-S/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp:1791:1 #39 0x00001839347f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:499:19 #40 0x0000183b32ec in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:895:44 #41 0x7f2825852d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) SUMMARY: AddressSanitizer: heap-use-after-free /-S/ydb/core/scheme/scheme_pathid.cpp:24:23 in NKikimr::TPathId::Hash() const Shadow bytes around the buggy address: 0x7b6824f6a000: fa fa fd fd fd fd fd fd fa fa fd fd fd fd fd fa 0x7b6824f6a080: fa fa fd fd fd fd fd fa fa fa fd fd fd fd fd fd 0x7b6824f6a100: fa fa fd fd fd fd fd fd fa fa 00 00 00 00 00 fa 0x7b6824f6a180: fa fa fd fd fd fd fd fa fa fa fd fd fd fd fd fd 0x7b6824f6a200: fa fa fd fd fd fd fd fd fa fa fd fd fd fd fd fd =>0x7b6824f6a280: fa fa fd fd fd fd fd fd fa fa fd fd fd[fd]fd fa 0x7b6824f6a300: fa fa fd fd fd fd fd fd fa fa fd fd fd fd fd fa 0x7b6824f6a380: fa fa fd fd fd fd fd fd fa fa fd fd fd fd fd fd 0x7b6824f6a400: fa fa fd fd fd fd fd fd fa fa fd fd fd fd fd fa 0x7b6824f6a480: fa fa fd fd fd fd fd fd fa fa fd fd fd fd fd fd 0x7b6824f6a500: fa fa fd fd fd fd fd fd fa fa 00 00 00 00 00 fa Shadow byte legend (one shadow byte represents 8 application bytes): Addressable: 00 Partially addressable: 01 02 03 04 05 06 07 Heap left redzone: fa Freed heap region: fd Stack left redzone: f1 Stack mid redzone: f2 Stack right redzone: f3 Stack after return: f5 Stack use after scope: f8 Global redzone: f9 Global init order: f6 Poisoned by user: f7 Container overflow: fc Array cookie: ac Intra object redzone: bb ASan internal: fe Left alloca redzone: ca Right alloca redzone: cb ==226027==ABORTING ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: 2026-02-19T08:14:34.732381Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486782207379442:2247];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:34.732434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:35.197813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:35.239782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:35.239889Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:35.305531Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:35.310648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:35.438635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:35.438659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:35.438718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:35.438812Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:35.441642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:35.676741Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:35.942646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:36.044673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.248479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.449084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:36.530787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.467137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486799387250271:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.467235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.467703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486799387250281:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.467787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:38.814635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.852559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.887112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.919184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:38.979962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.038396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.079794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.133789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.288910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486803682218452:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.289000Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.289425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486803682218457:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.289459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486803682218458:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.289565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:39.298763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:39.325079Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486803682218461:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:39.428024Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486803682218514:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:39.733869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486782207379442:2247];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:39.733968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:42.433369Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771488882443, txId: 281474976710673] shutting down 2026-02-19T08:14:43.520307Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:14:43.520478Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486821083702880:2270];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:43.520531Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:43.777796Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:43.781498Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:43.786053Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608486821083702634:2081] 1771488883444991 != 1771488883444994 2026-02-19T08:14:43.801018Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:43.802010Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:43.828780Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:44.069807Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:44.102328Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:44.102347Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:44.102355Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:44.102429Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-02-19T08:14:44.501120Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:44.890310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:44.897970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:14:44.920405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.046707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.274498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.404921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.520279Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486821083702880:2270];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:48.523531Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:49.206821Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486846853508291:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.206923Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.208985Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486846853508301:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.209059Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.294779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:49.340863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:49.386777Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:49.431229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:49.469535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:49.519714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:49.568023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:49.648493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:49.762112Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486846853509177:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.762214Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.762496Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486846853509182:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.762541Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486846853509183:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.762583Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:49.766711Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:49.783242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:14:49.784146Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486846853509186:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:49.880784Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486846853509237:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateAndAlterTableWithMinMaxPartitionsUncompat [GOOD] Test command err: 2026-02-19T08:13:47.660371Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486579766503273:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:47.663521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:47.922679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:47.942289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:47.942412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:48.050399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:48.055609Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:48.057041Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486579766503224:2081] 1771488827654658 != 1771488827654661 2026-02-19T08:13:48.174691Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:48.252643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:48.252667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:48.252681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:48.252778Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:48.662101Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:48.718201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:48.732700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, 
txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:13:48.802268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:48.979453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:49.138258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:49.219404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.238497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486596946374288:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.238662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.242453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486596946374298:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.242540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.610876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.641906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.671240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.705696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.738669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.772308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.811561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.866729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.949166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486596946375170:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.949234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.949361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486596946375175:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.949432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486596946375177:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.949474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.953125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:51.964857Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486596946375179:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:13:52.059202Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486601241342526:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:52.657110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486579766503273:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:52.657228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:53.610290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... 37968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:40.501703Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:40.502892Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:40.504037Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608486807045242595:2081] 1771488880320710 != 1771488880320713 2026-02-19T08:14:40.540796Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:40.545808Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:40.757694Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:40.757734Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:40.757746Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:40.757838Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:41.141015Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:41.370119Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:41.699320Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:41.709950Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:14:41.732483Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:41.869408Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.206639Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.322613Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:45.329595Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608486807045242639:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:45.329689Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:46.431399Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486832815048261:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.431508Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.431982Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486832815048271:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.432029Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:46.577196Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.656234Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.728742Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.791184Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.869512Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.968077Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:47.056773Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:47.174849Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:47.435228Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486837110016461:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.435338Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.435717Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486837110016466:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.435776Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486837110016467:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.435913Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:47.441583Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:47.472351Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608486837110016470:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:47.573376Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608486837110016521:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:50.940807Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.141511Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases [GOOD] >> TDistconfGenerateConfigTest::BadRack [GOOD] >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestExternalBoot >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |90.8%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> KqpOlapScheme::DropColumnAndResetTtl [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] >> KqpYql::BinaryJsonOffsetNormal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { 
RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 >> BindQueue::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2026-02-19T08:13:32.653083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:32.688064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:32.688329Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:32.696030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:32.696278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:32.696525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:32.696642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:32.696712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:32.696784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:32.696850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:32.696929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:32.697017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:32.697139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:32.697225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:32.697304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:32.697431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:32.726887Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:32.727034Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:32.727095Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:32.727307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:32.727477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:32.727570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:32.727614Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:32.727707Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:32.727774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:32.727821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:32.727854Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:32.728068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:32.728149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:32.728193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-02-19T08:13:32.728225Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:32.728333Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:32.728400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:32.728453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:32.728502Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:32.728563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:32.728601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:32.728650Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:32.728700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:32.728743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:32.728780Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:32.728988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:32.729037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:32.729064Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:32.729172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:32.729219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:32.729240Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:32.729281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:32.729316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:32.729340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:32.729371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:32.729396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:32.729416Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:32.729513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:32.729553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=94409; 2026-02-19T08:14:50.402042Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=314; 2026-02-19T08:14:50.403117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=979; 2026-02-19T08:14:50.403179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=96176; 2026-02-19T08:14:50.403230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=96300; 2026-02-19T08:14:50.403298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-02-19T08:14:50.403760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=392; 2026-02-19T08:14:50.403811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=97405; 2026-02-19T08:14:50.403977Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=112; 2026-02-19T08:14:50.404098Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=73; 2026-02-19T08:14:50.404476Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=332; 2026-02-19T08:14:50.404835Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=306; 2026-02-19T08:14:50.430102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=25172; 2026-02-19T08:14:50.467237Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=36968; 2026-02-19T08:14:50.467376Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2026-02-19T08:14:50.467454Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=27; 2026-02-19T08:14:50.467504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2026-02-19T08:14:50.467591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2026-02-19T08:14:50.467635Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-02-19T08:14:50.467731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2026-02-19T08:14:50.467794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-02-19T08:14:50.467900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=58; 2026-02-19T08:14:50.468016Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=61; 2026-02-19T08:14:50.468117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=62; 2026-02-19T08:14:50.468152Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=172101; 2026-02-19T08:14:50.468297Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=6469960;raw_bytes=6233340;count=1;records=75000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=6380152;raw_bytes=7453340;count=2;records=75200} inactive {blob_bytes=90315584;raw_bytes=94483560;count=214;records=1050200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:14:50.468444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:14:50.468505Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:14:50.468606Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:14:50.468670Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=0; 2026-02-19T08:14:50.468874Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:14:50.468937Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:816;message=tiling compaction: actualize called; 2026-02-19T08:14:50.468995Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=5; 2026-02-19T08:14:50.469088Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488517247;tx_id=18446744073709551615;;current_snapshot_ts=1771488814177; 2026-02-19T08:14:50.469135Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:14:50.469180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:14:50.469216Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:14:50.469311Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:14:50.469521Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.116000s; 2026-02-19T08:14:50.483180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:14:50.483663Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:14:50.483732Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:14:50.483849Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=5; 2026-02-19T08:14:50.483962Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488517247;tx_id=18446744073709551615;;current_snapshot_ts=1771488814177; 2026-02-19T08:14:50.484024Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:14:50.484080Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:14:50.484130Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:14:50.484241Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:189;event=skip_actualization;waiting=1.000000s; 2026-02-19T08:14:50.484325Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:14:50.485189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.017000s; 2026-02-19T08:14:50.485240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:7348:8977];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; >> TDistconfGenerateConfigTest::UsedNodes >> TDistconfGenerateConfigTest::UsedNodes [GOOD] >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true >> KqpScheme::DropTransfer_QueryService [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:12:37.050716Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:37.239017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:12:37.258190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:37.258268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:12:37.258759Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:12:37.551327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:37.551488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:37.635101Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488753826931 != 1771488753826935 2026-02-19T08:12:37.647898Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:37.695179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:37.797083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:12:38.094346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:38.108085Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:38.221958Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:12:38.222033Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:12:38.222221Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:12:38.388190Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:12:38.388296Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:12:38.389046Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:12:38.389161Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:12:38.389547Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:12:38.389727Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:12:38.389871Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:12:38.390199Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:12:38.392125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.393607Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:12:38.393698Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:12:38.433604Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:12:38.435020Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:12:38.435356Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:12:38.435683Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:12:38.448196Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:12:38.486923Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:12:38.487141Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:12:38.489005Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:12:38.489102Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:12:38.489152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:12:38.489601Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:12:38.489772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:12:38.489889Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:12:38.502317Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:12:38.604661Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:12:38.604898Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:12:38.605046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:12:38.605094Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:12:38.605150Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:12:38.605193Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:12:38.605442Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:38.605499Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:38.606040Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:12:38.606157Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:12:38.606251Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:12:38.606304Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:38.606368Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:12:38.606408Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:12:38.606446Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:12:38.606489Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:12:38.606680Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:12:38.606818Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:38.606884Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:38.606952Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:12:38.607060Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:12:38.607109Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:12:38.607227Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:12:38.607475Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:12:38.607539Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:12:38.607627Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... efault, DatabaseId: /Root, IsStreamingQuery: 0}. ExecuteDatashardTransaction trace_id_verbosity# 0 trace_id# 2026-02-19T08:14:53.271020Z node 13 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [13:1191:2882] TxId: 281474976710665. Ctx: { TraceId: 01khtfensz11cmt03556tzk2vn, Database: , SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 1 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:14:53.271077Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [13:1191:2882] TxId: 281474976710665. Ctx: { TraceId: 01khtfensz11cmt03556tzk2vn, Database: , SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Datashard not finished yet actor_state# WaitResolveState shard_id# 72075186224037888 state# Executing trace_id# 2026-02-19T08:14:53.271133Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [13:1191:2882] TxId: 281474976710665. Ctx: { TraceId: 01khtfensz11cmt03556tzk2vn, Database: , SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), trace_id# 2026-02-19T08:14:53.271181Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [13:1191:2882] TxId: 281474976710665. Ctx: { TraceId: 01khtfensz11cmt03556tzk2vn, Database: , SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:14:53.271493Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [13:1191:2882], Recipient [13:1160:2966]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1191 RawX2: 55834577730 } TxBody: " \0018\001j3\010\001\032\'\n#\t\005\000\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010&8\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976710665 ExecLevel: 0 Flags: 8 2026-02-19T08:14:53.271545Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:14:53.271686Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435074, Sender [13:1160:2966], Recipient [13:1160:2966]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:14:53.271724Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3243: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:14:53.271796Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:14:53.271984Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976710661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-02-19T08:14:53.272075Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710665] at 72075186224037888 on unit CheckDataTx 2026-02-19T08:14:53.272128Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710665] at 72075186224037888 is Executed 2026-02-19T08:14:53.272168Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710665] at 72075186224037888 executing on unit CheckDataTx 2026-02-19T08:14:53.272202Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:14:53.272232Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710665] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:14:53.272274Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/281474976710663 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2026-02-19T08:14:53.272319Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976710665] at 72075186224037888 2026-02-19T08:14:53.272355Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710665] at 72075186224037888 is Executed 2026-02-19T08:14:53.272381Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:14:53.272405Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710665] at 72075186224037888 to execution unit BlockFailPoint 2026-02-19T08:14:53.272429Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710665] at 72075186224037888 on unit BlockFailPoint 
2026-02-19T08:14:53.272457Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710665] at 72075186224037888 is Executed 2026-02-19T08:14:53.272481Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710665] at 72075186224037888 executing on unit BlockFailPoint 2026-02-19T08:14:53.272509Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-02-19T08:14:53.272536Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710665] at 72075186224037888 on unit ExecuteKqpDataTx 2026-02-19T08:14:53.272614Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:274: Operation [0:281474976710665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-02-19T08:14:53.272779Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976710661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2026-02-19T08:14:53.272894Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:520: add locks to result: 0 2026-02-19T08:14:53.272990Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710665] at 72075186224037888 is Executed 2026-02-19T08:14:53.273028Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-02-19T08:14:53.273057Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710665] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:14:53.273090Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710665] at 72075186224037888 on unit FinishPropose 2026-02-19T08:14:53.273147Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976710665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-02-19T08:14:53.273288Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710665] at 72075186224037888 is DelayComplete 2026-02-19T08:14:53.273323Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710665] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:14:53.273357Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710665] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:14:53.273420Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710665] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:14:53.273487Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710665] at 72075186224037888 is Executed 2026-02-19T08:14:53.273520Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710665] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:14:53.273550Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976710665] at 72075186224037888 has finished 2026-02-19T08:14:53.273617Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:14:53.273654Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:281474976710665] at 72075186224037888 on unit FinishPropose 2026-02-19T08:14:53.273695Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:53.273925Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [13:1191:2882] TxId: 281474976710665. Ctx: { TraceId: 01khtfensz11cmt03556tzk2vn, Database: , SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037888 status# COMPLETE error# trace_id# 2026-02-19T08:14:53.274118Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [13:1191:2882] TxId: 281474976710665. Ctx: { TraceId: 01khtfensz11cmt03556tzk2vn, Database: , SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:14:53.274296Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [13:1191:2882] TxId: 281474976710665. Ctx: { TraceId: 01khtfensz11cmt03556tzk2vn, Database: , SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:14:53.274518Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3455} SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, ActorId: [13:1049:2882], ActorState: CleanupState, LegacyTraceId: 01khtfensz11cmt03556tzk2vn, EndCleanup is_final# false trace_id# 2026-02-19T08:14:53.274769Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3170} SessionId: ydb://session/3?node_id=13&id=ZjQ3M2YyMWItZDZmNjk1Zi1jYzFhNjkwYi1jYzhhOGIwNQ==, ActorId: [13:1049:2882], ActorState: CleanupState, LegacyTraceId: 01khtfensz11cmt03556tzk2vn, Sent query response back to proxy proxy_request_id# 8 proxy_id# [13:67:2114] trace_id# 2026-02-19T08:14:53.585736Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [13:1200:2992], Recipient [13:1160:2966]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:53.585877Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:14:53.585990Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [13:1199:2991], serverId# [13:1200:2992], sessionId# [0:0:0] 2026-02-19T08:14:53.586297Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553224, Sender [13:834:2728], Recipient [13:1160:2966]: NKikimr::TEvDataShard::TEvGetOpenTxs >> TBlobStorageWardenTest::TestFilterBadSerials >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootCounters >> TBlobStorageWardenTest::TestHttpMonPage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring 
{ Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 10 } Ring { Node: 11 } Ring { Node: 12 } Ring { Node: 13 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 12 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 >> TDistconfGenerateConfigTest::GenerateConfigSimpleCases >> TDistconfGenerateConfigTest::GenerateConfig3DCCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases >> TDistconfGenerateConfigTest::GenerateConfigSimpleCases [GOOD] >> TDistconfGenerateConfigTest::IgnoreNodes [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpOlapScheme::DropColumnAndResetTtl [GOOD] Test command err: 2026-02-19T08:13:49.609002Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486588079306549:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:49.609671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:49.869805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:49.895184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:49.895310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:49.987008Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486588079306505:2081] 1771488829600408 != 1771488829600411 2026-02-19T08:13:50.004825Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 
2026-02-19T08:13:50.007967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:50.093961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:50.167682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:50.167705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:50.167714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:50.167821Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:50.634058Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:50.744172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) CREATE TABLE `/Root/ColumnTableTest` (id Int32 NOT NULL, id_second Int32 NOT NULL, level Int32, created_at Timestamp NOT NULL, PRIMARY KEY (id, id_second)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16); 2026-02-19T08:13:53.023464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486605259176591:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.023594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.023987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486605259176601:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.024074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.307622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:13:53.522293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:53.522293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:53.522541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:53.522787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:53.522923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:53.523015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:53.523086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:53.523111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:53.523239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:53.523308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:53.523405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:53.523450Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:53.523539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:53.523611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:53.523654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:53.523796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:53.523799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:53.523907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:53.523922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:53.524056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608486605259176799:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:53.524111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:53.524242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:53.524443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:53.524578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:53.524694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:53.524799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608486605259176784:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:53.573 ... t_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:14:53.811252Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:14:53.811530Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:14:53.811569Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:14:53.811708Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:14:53.811767Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:14:53.811825Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:14:53.811859Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:14:53.811906Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:14:53.811938Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:14:53.812185Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:14:53.812228Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 
2026-02-19T08:14:53.812390Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:14:53.812446Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:14:53.812509Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:14:53.812551Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:14:53.812601Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:14:53.812634Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:14:53.812793Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:14:53.812836Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-02-19T08:14:53.812951Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-02-19T08:14:53.812996Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-02-19T08:14:53.835449Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[9:7608486861965032614:2341];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=137051076684896;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1771488893834;max=18446744073709551615;plan=0;src=[9:7608486836195228246:2152];cookie=12:1;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:14:53.858326Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:14:53.858421Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:14:53.858451Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:14:53.934194Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7608486861965032683:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:53.934325Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:53.934791Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7608486861965032686:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:53.934850Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:53.970993Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2026-02-19T08:14:53.995061Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } UpsertIndexes { Id: 3 Name: "max_pk_int" StorageId: "__LOCAL_METADATA" InheritPortionStorage: true ClassName: "MAX" MaxIndex { ColumnId: 2 } } Options { SchemeNeedActualization: false } ; 2026-02-19T08:14:54.074196Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7608486866260000008:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:54.074341Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:54.074665Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7608486866260000011:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:54.074713Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:54.115548Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2026-02-19T08:14:54.198180Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7608486866260000044:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:54.198321Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:54.198739Z node 9 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7608486866260000047:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:54.198811Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:54.253370Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2026-02-19T08:14:54.262303Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=column_engine_logs.cpp:305;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 DropColumns: 2 DefaultCompression { } Options { SchemeNeedActualization: false } ; >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] >> KqpScheme::CreateTransfer [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::BinaryJsonOffsetNormal [GOOD] Test command err: 2026-02-19T08:14:37.815936Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486792535728893:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:37.816051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:37.882719Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:14:38.257820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:38.279557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:38.279673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:38.331690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:38.385423Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:38.393876Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486792535728647:2081] 1771488877751593 != 1771488877751596 2026-02-19T08:14:38.446710Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:38.568679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:38.568708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2026-02-19T08:14:38.568719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:38.568817Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:38.811801Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:39.420745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:39.429638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:14:39.588123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:39.783621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:40.000231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:40.093181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.276258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486814010567007:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:42.276391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:42.289790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486814010567017:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:42.289970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:42.755937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.803802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.817815Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486792535728893:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:42.821154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:42.843323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.883881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.936092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:42.987205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:43.061826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:43.141804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:43.320281Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486818305535188:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.320373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.321113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486818305535193:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.321162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486818305535194:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.321290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:43.325901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:43.346780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:14:43.347231Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486818305535197:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:43.421196Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486818305535249:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/ ... gacyTraceId: 01khtfeeyz03fw3b115cqmpky2, Internal erroryql/essentials/types/binary_json/read.cpp:159: StringOffset must be inside buffer trace_id# 2026-02-19T08:14:46.044829Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=ZDE0OThmNmQtMmYzNDE2YjItZWE0Y2RkMDYtY2MzMGMyMw==, ActorId: [1:7608486826895470162:2541], ActorState: ExecuteState, LegacyTraceId: 01khtfeeyz03fw3b115cqmpky2, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:159: StringOffset must be inside buffer status# INTERNAL_ERROR issues# trace_id# 2026-02-19T08:14:47.300214Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486838904797458:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:47.302798Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:47.359521Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:47.492130Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:47.509100Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:47.509203Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:47.540601Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:47.685919Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:47.758578Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:47.758618Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:47.758627Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:47.758725Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:48.286014Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:48.428216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:48.457429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.600483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.800077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:48.882355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.089825Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486860379635752:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.089947Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.090743Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486860379635762:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.090803Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.179119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.231449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.290803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.302939Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486838904797458:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:52.303413Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:52.352078Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.412159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.502575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.592831Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.664901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.768445Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486860379636635:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.768537Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.769209Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486860379636640:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.769308Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608486860379636641:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.769423Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.773634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:52.798355Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608486860379636644:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:52.905096Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608486860379636695:3759] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TBlobStorageWardenTest::TestSendToInvalidGroupId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::DropTransfer_QueryService [GOOD] Test command err: 2026-02-19T08:13:48.659156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486582347070802:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:48.659390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:48.958773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:48.958905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:49.000630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:49.031452Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486582347070751:2081] 1771488828646728 != 1771488828646731 2026-02-19T08:13:49.040411Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:49.049395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:49.155547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:49.155571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:49.155609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:49.155717Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:49.165489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:49.570134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:49.579770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:13:49.645523Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:49.682934Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:49.802273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:49.981574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:50.050267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.017015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486599526941811:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.017126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.017415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486599526941821:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.017479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.362514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.401261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.436884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.477096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.508279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.541037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.614581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.657313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.720825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486599526942692:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.720920Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.720986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486599526942697:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.721266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486599526942699:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.721316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.724782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:52.735121Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486599526942700:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:13:52.838101Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486599526942752:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:53.658689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486582347070802:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:53.658750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:54.390512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... tialize from file: (empty maybe) 2026-02-19T08:14:45.112427Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:45.463671Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:14:45.586260Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:46.449874Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:46.466200Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.618399Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.914867Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:47.075465Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:14:49.577846Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608486824390734292:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:49.577953Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:50.703302Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486850160539882:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:50.703407Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:50.703897Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486850160539892:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:50.703956Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:50.801841Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:50.849739Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:50.907344Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:50.954598Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.002286Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.058689Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.107583Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.208509Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.434883Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486854455508061:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.435000Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.435310Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486854455508067:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.435356Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.435379Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486854455508066:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.439946Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:51.464413Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608486854455508070:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:51.549807Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608486854455508121:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:55.213214Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:55.520189Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-02-19T08:14:55.627414Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTransferCascade, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp:359) 2026-02-19T08:14:55.637416Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:14:55.658076Z node 8 :REPLICATION_CONTROLLER ERROR: stream_consumer_remover.cpp:61: [StreamConsumerRemover][rid 1][tid 1] Error: status# NOT_FOUND, issues# {
: Error: some consumers in drop_consumers are missing already, code: 500003 } 2026-02-19T08:14:55.658238Z node 8 :REPLICATION_CONTROLLER ERROR: tx_drop_stream_result.cpp:59: [controller 72075186224037930][TxDropStreamResult] Drop stream error: rid# 1, tid# 1, status# NOT_FOUND, issue# {
: Error: some consumers in drop_consumers are missing already, code: 500003 } 2026-02-19T08:14:55.674622Z node 8 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 8, TabletId: 72075186224037930 not found >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |90.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 17450012066444943223 SEND TEvPut with key [1:1:1:0:0:100:0] 2026-02-19T08:14:56.279738Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2026-02-19T08:14:56.280222Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2026-02-19T08:14:56.374266Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] Test command err: Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } Ring { Node: 2 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 6 } Ring { Node: 11 } Ring { Node: 16 } Ring { Node: 21 } Ring { Node: 26 } Ring { Node: 31 } Ring { Node: 36 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 11 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { 
Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] Test command err: Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 3 } Ring { Node: 5 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 19 } Ring { Node: 20 } Ring { Node: 21 } Ring { Node: 37 } Ring { Node: 38 } Ring { Node: 39 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 10 } Ring { Node: 19 } } } Expected: NToSelect: 3 >> TBlobStorageWardenTest::TestDeleteStoragePool >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |90.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:12:36.784501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:12:36.941117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:12:36.969379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:12:36.969462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:12:36.970228Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:12:37.303429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:12:37.303564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:12:37.382991Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771488753565219 != 1771488753565223 2026-02-19T08:12:37.395002Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:12:37.441510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:12:37.535199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:12:37.836835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:12:37.850301Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:12:37.950983Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:12:37.951060Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:12:37.951195Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:12:38.079624Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:12:38.079732Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:12:38.080475Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:12:38.080606Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:12:38.080954Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:12:38.081082Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:12:38.081241Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:12:38.081532Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:12:38.083399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:38.084556Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:12:38.084639Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:12:38.116726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:12:38.117759Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:12:38.118153Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:12:38.118440Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:12:38.128292Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:12:38.168268Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:12:38.168410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:12:38.170179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:12:38.170281Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:12:38.170338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:12:38.170731Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:12:38.170915Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:12:38.170997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:12:38.171522Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:12:38.211048Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:12:38.211254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:12:38.211373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:12:38.211434Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:12:38.211542Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:12:38.211578Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:12:38.211801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:38.211855Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:12:38.212173Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:12:38.212271Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:12:38.212336Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:12:38.212384Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:12:38.212429Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:12:38.212468Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:12:38.212530Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:12:38.212565Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:12:38.212621Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:12:38.212707Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:38.212743Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:12:38.212782Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:12:38.212879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:12:38.212931Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:12:38.213031Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:12:38.213278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:12:38.213357Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:12:38.213452Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transactio ... XECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [14:1203:2882] TxId: 281474976715665. Ctx: { TraceId: 01khtferwr89v5ddb5bz7cegd3, Database: , SessionId: ydb://session/3?node_id=14&id=Zjg5MDBiYS00NmEzYjlhMi01MGRjMGI3LWIxNTU3NmZj, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-02-19T08:14:56.027960Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=14&id=Zjg5MDBiYS00NmEzYjlhMi01MGRjMGI3LWIxNTU3NmZj, ActorId: [14:1052:2882], ActorState: ExecuteState, LegacyTraceId: 01khtferwr89v5ddb5bz7cegd3, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/table`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-02-19T08:14:56.029136Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 278003712, Sender [14:1073:2882], Recipient [14:900:2777]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } Op: Rollback } 2026-02-19T08:14:56.029185Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-02-19T08:14:56.029315Z node 14 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2026-02-19T08:14:56.029369Z node 14 :TX_DATASHARD NOTICE: datashard.cpp:3155: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) ... waiting for blocked lock status ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR cookie 0 ... 
waiting for blocked lock status (done) 2026-02-19T08:14:56.031967Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 65543, Sender [14:834:2728], Recipient [14:900:2777]: NActors::TEvents::TEvPoison 2026-02-19T08:14:56.032796Z node 14 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2026-02-19T08:14:56.032896Z node 14 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:14:56.060100Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [14:1208:3012], Recipient [14:1210:3013]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:14:56.068388Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [14:1208:3012], Recipient [14:1210:3013]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:14:56.068633Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828684, Sender [14:1208:3012], Recipient [14:1210:3013]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:14:56.073335Z node 14 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1210:3013] 2026-02-19T08:14:56.073961Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:14:56.082360Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:14:56.084101Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:14:56.087845Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:14:56.088004Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:14:56.088117Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:14:56.088891Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:14:56.089237Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:14:56.089340Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:14:56.089430Z node 14 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state PreOffline tabletId 72075186224037888 2026-02-19T08:14:56.089635Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:14:56.090183Z node 14 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:14:56.090433Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [14:1224:3020] 2026-02-19T08:14:56.090513Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:14:56.090593Z node 14 :TX_DATASHARD INFO: datashard.cpp:1297: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2026-02-19T08:14:56.090663Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:14:56.091234Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [14:71:2118], Recipient [14:1210:3013]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2026-02-19T08:14:56.091589Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [14:1210:3013], Recipient [14:1210:3013]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:14:56.091648Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:14:56.091993Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435075, Sender [14:1210:3013], Recipient [14:1210:3013]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2026-02-19T08:14:56.092046Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3244: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2026-02-19T08:14:56.093379Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 270270976, Sender [14:26:2073], Recipient [14:1210:3013]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 700} 2026-02-19T08:14:56.093445Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3254: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-02-19T08:14:56.093517Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 700 2026-02-19T08:14:56.093603Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:14:56.094515Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:14:56.094608Z node 14 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037888 state 5 2026-02-19T08:14:56.095180Z node 14 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-02-19T08:14:56.095276Z node 14 :TX_DATASHARD INFO: datashard.cpp:4137: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2026-02-19T08:14:56.095353Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:4026: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2026-02-19T08:14:56.095755Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 270270978, Sender [14:26:2073], Recipient [14:1210:3013]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 500 NextReadStep# 700 ReadStep# 700 } 2026-02-19T08:14:56.095815Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3255: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-02-19T08:14:56.095901Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 500 next step 700 2026-02-19T08:14:56.096070Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [14:1210:3013], Recipient [14:1109:2925]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-02-19T08:14:56.096117Z node 14 
:TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:14:56.096195Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-02-19T08:14:56.096367Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-02-19T08:14:56.096474Z node 14 :TX_DATASHARD INFO: datashard_pipeline.cpp:734: Outdated readset for 600:281474976715663 at 72075186224037889 2026-02-19T08:14:56.096563Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-02-19T08:14:56.096657Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-02-19T08:14:56.096934Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [14:1109:2925], Recipient [14:1210:3013]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-02-19T08:14:56.096986Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:14:56.097070Z node 14 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2026-02-19T08:14:56.097186Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:14:56.097366Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:14:56.255497Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> VDiskBalancing::TestRandom_Block42 >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts >> THiveTest::TestExternalBootCounters [GOOD] >> THiveTest::TestDownCounterDeleteNode >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] >> TBlobStorageWardenTest::TestEvVGenerationChangeRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:21.072045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:21.072164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:21.072257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:21.072308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:21.072376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:21.072416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:21.072447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:21.072475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:21.072556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:21.072646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:21.073620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:21.074247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:21.179167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:21.179216Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:21.206333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:21.207835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:21.208074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:21.219852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:21.220666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:21.221398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:21.221742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: 
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:21.225278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:21.225470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:21.226881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:21.226960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:21.227081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:21.227125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:21.227166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:21.227372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.370257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.371480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.371616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.371712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.371875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.371955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.372052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.372172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.372286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.372373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.372435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.372509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:13:21.372597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.372693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... angeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 7882 Memory: 124456 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 130 TableOwnerId: 72057594046678944 FollowerId: 0 2026-02-19T08:14:58.327581Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:14:58.327647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.7882 2026-02-19T08:14:58.327784Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:14:58.327836Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-02-19T08:14:58.364740Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:58.364824Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:58.364894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-02-19T08:14:58.365000Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 1 2026-02-19T08:14:58.365050Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-02-19T08:14:58.365233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-02-19T08:14:58.365312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-02-19T08:14:58.365355Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-02-19T08:14:58.365520Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-02-19T08:14:58.365730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 30 seconds 2026-02-19T08:14:58.365793Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:228: [BackgroundCompaction] [Update] Enqueued shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2026-02-19T08:14:58.365879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:607: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-02-19T08:14:58.365983Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:14:58.366377Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553210, Sender [3:128:2152], Recipient [3:717:2701]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2026-02-19T08:14:58.366573Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:148: Started compaction# 1 with cookie 1 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:128:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2026-02-19T08:14:58.368359Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:14:58.370091Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 0, front# 1 2026-02-19T08:14:58.385072Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 268828683, Sender [3:706:2692], Recipient [3:717:2701]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-02-19T08:14:58.390340Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.241000Z 2026-02-19T08:14:58.390466Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of 
tablet# 72075186233409546, table# 1001, finished edge# 1, front# 1 2026-02-19T08:14:58.390535Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:266: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:128:2152], pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:14:58.390929Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553211, Sender [3:717:2701], Recipient [3:128:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2026-02-19T08:14:58.390982Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5375: StateWork, processing event TEvDataShard::TEvCompactTableResult 2026-02-19T08:14:58.391099Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 0, Rows# 0, Deletes# 0, Compaction# 1970-01-01T00:00:30.000000Z}, next wakeup in# 29.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-02-19T08:14:58.391226Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:113: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 29.996000s, rate# 1, in queue# 0 shards, waiting after compaction# 0 shards, running# 1 shards at schemeshard 72057594046678944 2026-02-19T08:14:58.391907Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553210, Sender [3:128:2152], Recipient [3:717:2701]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2026-02-19T08:14:58.392046Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:148: Started compaction# 2 with cookie 1 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:128:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2026-02-19T08:14:58.393822Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.241000Z 2026-02-19T08:14:58.393863Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 2 2026-02-19T08:14:58.400155Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 268828683, Sender [3:706:2692], Recipient [3:717:2701]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-02-19T08:14:58.401057Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:196: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.241000Z 2026-02-19T08:14:58.402462Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 268828683, Sender [3:706:2692], Recipient [3:717:2701]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-02-19T08:14:58.409839Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 2, ts 1970-01-01T00:00:30.245000Z 2026-02-19T08:14:58.409939Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: 
ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 2, front# 2 2026-02-19T08:14:58.409972Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:266: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:128:2152], pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:14:58.410282Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553211, Sender [3:717:2701], Recipient [3:128:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2026-02-19T08:14:58.410315Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5375: StateWork, processing event TEvDataShard::TEvCompactTableResult 2026-02-19T08:14:58.410405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 0 seconds 2026-02-19T08:14:58.410456Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:113: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.992000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-02-19T08:14:58.412701Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 268828683, Sender [3:706:2692], Recipient [3:717:2701]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-02-19T08:14:58.423519Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:58.423605Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:14:58.423635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:14:58.449881Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:196: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.245000Z ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateTransfer [GOOD] Test command err: 2026-02-19T08:13:49.164835Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486587305623619:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:49.165359Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:49.395342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:49.422915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:49.423022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:49.478796Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:49.495490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:49.597921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:49.597950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:49.597961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:49.598109Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:49.630644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:50.054706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:50.064424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:13:50.119321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:50.194511Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:50.304584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:50.537992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:50.632193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.609327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486600190527312:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.609477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.609815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486600190527322:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.609894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.976604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:53.005242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:53.044126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:53.075005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:53.107923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:53.153654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:53.187319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:53.233214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:53.309112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486604485495488:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.309236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.309528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486604485495493:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.309566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486604485495494:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.309659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:53.314131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:53.325593Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486604485495497:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:13:53.400632Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486604485495548:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:54.163432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486587305623619:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:54.163520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:55.792968Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486614394188700:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:55.793832Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error= ... rd__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:46.217948Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.411197Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.844890Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:46.977595Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.349399Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486855221372778:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.349509Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.350428Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486855221372790:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.350560Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:51.462457Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.517859Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.574986Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.667162Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.752911Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.866513Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:51.946792Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.044320Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:52.248755Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486859516340968:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.248885Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.249390Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486859516340973:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.249450Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608486859516340974:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.249611Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:52.264222Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:52.305251Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608486859516340977:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:52.385860Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608486859516341040:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:14:56.438550Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608486876696210576:4010] txid# 281474976710673, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-02-19T08:14:56.488244Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:56.880173Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-02-19T08:14:57.045224Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:14:57.149470Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-02-19T08:14:57.282544Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:14:57.302491Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-02-19T08:14:57.353839Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:14:57.363666Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTransfer, opId: 281474976710682:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-02-19T08:14:57.387896Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608486880991178505:4376] txid# 281474976710683, issues: { message: "Check failed: path: \'/Root/topic\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 55], type: EPathTypePersQueueGroup, state: EPathStateAlter)" severity: 1 } >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 9781946963485141107 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2026-02-19T08:14:57.401316Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 >> CheckIntegrityBlock42::DataOk >> CheckIntegrityBlock42::PlacementWrongDisks |90.8%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] Test command err: 2026-02-19T08:14:56.450575Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.450708Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.450787Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.452610Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.455097Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.455217Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 
VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.460654Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.460798Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.460879Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.466286Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.466408Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.466528Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.466600Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.466951Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:7:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:7:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.470761Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.470890Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:7:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:7:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.471202Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.476898Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.477030Z node 1 :BS_SYNCLOG WARN: 
blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.477113Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.479386Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.479499Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.479577Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ynks/0051ba/r3tmp/tmpbcm9LK/pdisk_1.dat 2026-02-19T08:14:56.676356Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.676482Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.676557Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.676638Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.676768Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:7:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:7:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.677492Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:56.677601Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:57.774745Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [ced0949902bd0a34] bootstrap ActorId# [1:1178:3030] Group# 33554432 BlobCount# 1 BlobIDs# 
[[72057594037932033:2:8:0:0:1370:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-02-19T08:14:57.774966Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775024Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775053Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775081Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775112Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775141Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775170Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775199Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775228Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775254Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775282Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775311Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775339Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775369Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775397Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033:2:8:0:0:1370:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:57.775427Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [ced0949902bd0a34] Id# [72057594037932033: ... 
ealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 0 } - Test case 1a - repeat the same ApplyServiceSet request Creating pdisk 2026-02-19T08:15:00.594544Z node 3 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:858} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 3 ServiceSet { PDisks { NodeID: 3 PDiskID: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" PDiskGuid: 0 PDiskCategory: 0 PDiskConfig { } EntityStatus: CREATE } } } 2026-02-19T08:15:00.594673Z node 3 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:876} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 3 ServiceSet { PDisks { NodeID: 3 PDiskID: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" PDiskGuid: 0 PDiskCategory: 0 PDiskConfig { } EntityStatus: CREATE } } } 2026-02-19T08:15:00.594769Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 3 PDiskID: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" PDiskGuid: 0 PDiskCategory: 0 PDiskConfig { } EntityStatus: CREATE } } 2026-02-19T08:15:00.594915Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:171} Inferred PDisk slot count Path# SectorMap:TestInferPDiskSlotCount:2400 SlotCount# 12 SlotSizeInUnits# 2 FromDriveSize# 2576980377600 FromUnitSize# 107374182400 FromMaxSlots# 16 2026-02-19T08:15:00.594988Z node 3 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:553} ApplyServiceSetPDisks PDiskId# 1002 NewExpectedSlotCount# 12 OldExpectedSlotCount# 12 NewSlotSizeInUnits# 2 OldSlotSizeInUnits# 2 2026-02-19T08:15:00.595422Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1002 ReqId# 2565120001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3608.091215 2026-02-19T08:15:00.595556Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-02-19T08:15:00.595609Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1002 ReqId# 2565120001604 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-02-19T08:15:00.595658Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:395} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2026-02-19T08:15:00.595738Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000006 PDiskId# 1002 2026-02-19T08:15:00.595813Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:350} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 
BytesToRead# 4096 ReqId# 2565120001860 PDiskId# 1002 2026-02-19T08:15:00.595965Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1002 ReqId# 2565120001860 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3608.099815 2026-02-19T08:15:00.596062Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-02-19T08:15:00.596131Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1002 ReqId# 2565120001860 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-02-19T08:15:00.596183Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:395} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 - Test case 1b - change InferPDiskSlotCountSettings insignificantly 2026-02-19T08:15:00.605643Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:171} Inferred PDisk slot count Path# SectorMap:TestInferPDiskSlotCount:2400 SlotCount# 12 SlotSizeInUnits# 2 FromDriveSize# 2576980377600 FromUnitSize# 107374182400 FromMaxSlots# 18 - Test case 2 - update InferPDiskSlotCountSettings 2026-02-19T08:15:00.618357Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:171} Inferred PDisk slot count Path# SectorMap:TestInferPDiskSlotCount:2400 SlotCount# 24 SlotSizeInUnits# 1 FromDriveSize# 2576980377600 FromUnitSize# 107374182400 FromMaxSlots# 24 2026-02-19T08:15:00.618461Z node 3 :BS_NODE DEBUG: {NW112@node_warden_impl.cpp:1300} SendChangeExpectedSlotCount from config notification PDiskId# 1002 ExpectedSlotCount# 24 SlotSizeInUnits# 1 2026-02-19T08:15:00.618655Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:208} CreateReqFromEv with sender ev# {EvChangeExpectedSlotCount ExpectedSlotCount# 24 SlotSizeInUnits# 1} Sender# 7 ReqId# 10020000007 PDiskId# 1002 2026-02-19T08:15:00.618723Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1002 ReqId# 2565120002130 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3608.122726 2026-02-19T08:15:00.618891Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TChangeExpectedSlotCount OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-02-19T08:15:00.618939Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1002 ReqId# 2565120002130 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-02-19T08:15:00.619094Z node 3 :BS_NODE DEBUG: {NW108@node_warden_impl.cpp:672} Handle(NPDisk::TEvChangeExpectedSlotCountResult) Msg# {EvChangeExpectedSlotCountResult Status#OK ErrorReason# ""} 2026-02-19T08:15:00.703179Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10020000008 PDiskId# 1002 2026-02-19T08:15:00.703291Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1002 ReqId# 2565120002360 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3608.207273 2026-02-19T08:15:00.705870Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-02-19T08:15:00.705956Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1002 ReqId# 2565120002360 PushRequestToScheduler Push to FastOperationsQueue.size# 
1 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 24 NumActiveSlots: 0 SlotSizeInUnits: 1 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 24 SlotSizeInUnits: 1 PDiskUsage: 0 } - Test case 3 - set ExpectedSlotCount explicitly Creating pdisk 2026-02-19T08:15:00.706694Z node 3 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:858} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 3 ServiceSet { PDisks { NodeID: 3 PDiskID: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" PDiskGuid: 0 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 17 } EntityStatus: CREATE } } } 2026-02-19T08:15:00.706813Z node 3 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:876} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 3 ServiceSet { PDisks { NodeID: 3 PDiskID: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" PDiskGuid: 0 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 17 } EntityStatus: CREATE } } } 2026-02-19T08:15:00.706909Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 3 PDiskID: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" PDiskGuid: 0 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 17 } EntityStatus: CREATE } } 2026-02-19T08:15:00.707047Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:150} Skipped inferring PDisk slot count, using explicit settings Path# SectorMap:TestInferPDiskSlotCount:2400 SlotCount# 17 SlotSizeInUnits# 0 2026-02-19T08:15:00.707118Z node 3 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:553} ApplyServiceSetPDisks PDiskId# 1002 NewExpectedSlotCount# 17 OldExpectedSlotCount# 24 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 1 2026-02-19T08:15:00.707161Z node 3 :BS_NODE DEBUG: {NW107@node_warden_pdisk.cpp:559} SendChangeExpectedSlotCount PDiskId# 1002 ExpectedSlotCount# 17 SlotSizeInUnits# 0 2026-02-19T08:15:00.707307Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:208} CreateReqFromEv with sender ev# {EvChangeExpectedSlotCount ExpectedSlotCount# 17 SlotSizeInUnits# 0} Sender# 7 ReqId# 10020000009 PDiskId# 1002 2026-02-19T08:15:00.707365Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1002 ReqId# 2565120002642 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3608.211359 2026-02-19T08:15:00.707488Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TChangeExpectedSlotCount OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-02-19T08:15:00.707528Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1002 ReqId# 2565120002642 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-02-19T08:15:00.707643Z node 3 :BS_NODE DEBUG: {NW108@node_warden_impl.cpp:672} Handle(NPDisk::TEvChangeExpectedSlotCountResult) Msg# {EvChangeExpectedSlotCountResult Status#OK ErrorReason# ""} 2026-02-19T08:15:00.814351Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# 
NKikimr::NPDisk::TWhiteboardReport ReqId# 10020000010 PDiskId# 1002 2026-02-19T08:15:00.814457Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1002 ReqId# 2565120002872 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3608.318426 2026-02-19T08:15:00.814763Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-02-19T08:15:00.814825Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1002 ReqId# 2565120002872 PushRequestToScheduler Push to FastOperationsQueue.size# 1 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 17 NumActiveSlots: 0 SlotSizeInUnits: 0 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 17 SlotSizeInUnits: 0 PDiskUsage: 0 } |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:10:34.919908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:10:34.920007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:34.920095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:10:34.920153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:10:34.920189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:10:34.920227Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:10:34.920282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:10:34.920345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:10:34.920420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:10:34.920509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:10:34.921392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:34.921764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:10:35.069830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:10:35.069908Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:10:35.070778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:10:35.096382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:10:35.096493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:10:35.096669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:10:35.114984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:10:35.119980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:10:35.120589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:35.120909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:10:35.124595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:35.124756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:10:35.126316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:10:35.126391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:10:35.126655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-02-19T08:10:35.126714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:10:35.126772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:10:35.127126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:10:35.133631Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:246:2058] recipient: [1:15:2062] 2026-02-19T08:10:35.276999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:10:35.277230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:35.277391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:10:35.277455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:10:35.277628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:10:35.277686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:10:35.279624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:35.279815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:10:35.279982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:35.280042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:10:35.280083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:10:35.280134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 
2026-02-19T08:10:35.281784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:35.281842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:10:35.281886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:10:35.283238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:35.283285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:10:35.283358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:35.283444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:10:35.286472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:10:35.288240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:10:35.288419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:10:35.289305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:10:35.289455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:10:35.289514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:35.289754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:10:35.289806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:10:35.289985Z node 1 :FLAT_TX_SCH ... 
as 4 2026-02-19T08:14:55.399074Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-02-19T08:14:55.399107Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-02-19T08:14:55.399140Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-02-19T08:14:55.399170Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2026-02-19T08:14:55.399202Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-02-19T08:14:55.401537Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:14:55.401677Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:14:55.401749Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:14:55.401794Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-02-19T08:14:55.401844Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-02-19T08:14:55.403327Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:14:55.403434Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:14:55.403469Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:14:55.403503Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-02-19T08:14:55.403541Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-02-19T08:14:55.404795Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 
72057594046678944, cookie: 202 2026-02-19T08:14:55.404886Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:14:55.404921Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:14:55.404954Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2026-02-19T08:14:55.404992Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-02-19T08:14:55.406304Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:14:55.406393Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-02-19T08:14:55.406430Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-02-19T08:14:55.406464Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-02-19T08:14:55.406502Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 3 2026-02-19T08:14:55.406591Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-02-19T08:14:55.411400Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:14:55.411589Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:14:55.427385Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-02-19T08:14:55.427619Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-02-19T08:14:55.429634Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-02-19T08:14:55.429696Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-02-19T08:14:55.436849Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-02-19T08:14:55.437026Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-02-19T08:14:55.437074Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:3910:5625] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-02-19T08:14:55.447902Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-02-19T08:14:55.448003Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-02-19T08:14:55.448128Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-02-19T08:14:55.448161Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-02-19T08:14:55.448241Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-02-19T08:14:55.448272Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-02-19T08:14:55.448346Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-02-19T08:14:55.448382Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-02-19T08:14:55.448483Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-02-19T08:14:55.448524Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-02-19T08:14:55.458470Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-02-19T08:14:55.459108Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-02-19T08:14:55.459168Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:3913:5628] 2026-02-19T08:14:55.459559Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-02-19T08:14:55.460060Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-02-19T08:14:55.460315Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-02-19T08:14:55.460424Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-02-19T08:14:55.460468Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter 
[32:3913:5628] 2026-02-19T08:14:55.460762Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-02-19T08:14:55.460880Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-02-19T08:14:55.460928Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:3913:5628] 2026-02-19T08:14:55.461114Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-02-19T08:14:55.461154Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:3913:5628] 2026-02-19T08:14:55.461395Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-02-19T08:14:55.461448Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:3913:5628] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] Test command err: 2026-02-19T08:14:52.506054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:52.540185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:52.540297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:52.557339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:52.768369Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:52.771273Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486853297715238:2081] 1771488891930797 != 1771488891930800 2026-02-19T08:14:52.833791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:52.955885Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:53.218125Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486853297715507:2106] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:53.218183Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486861887650409:2269] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:53.218331Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608486857592682826:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:14:53.218447Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608486857592682986:2203][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608486857592682826:2119], cookie# 1 2026-02-19T08:14:53.220396Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608486857592683043:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486857592683040:2203], cookie# 1 2026-02-19T08:14:53.220433Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608486857592683044:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486857592683041:2203], cookie# 1 2026-02-19T08:14:53.220446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608486857592683045:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486857592683042:2203], cookie# 1 2026-02-19T08:14:53.220522Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608486853297715206:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486857592683043:2203], cookie# 1 2026-02-19T08:14:53.220560Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608486853297715209:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486857592683044:2203], cookie# 1 2026-02-19T08:14:53.220576Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608486853297715212:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486857592683045:2203], cookie# 1 2026-02-19T08:14:53.220616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608486857592683043:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486853297715206:2049], cookie# 1 2026-02-19T08:14:53.220631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608486857592683044:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486853297715209:2052], cookie# 1 2026-02-19T08:14:53.220660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608486857592683045:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486853297715212:2055], cookie# 1 2026-02-19T08:14:53.220711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608486857592682986:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486857592683040:2203], cookie# 1 2026-02-19T08:14:53.220733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608486857592682986:2203][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:14:53.220748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608486857592682986:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486857592683041:2203], cookie# 1 2026-02-19T08:14:53.220798Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:7608486857592682986:2203][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:14:53.220829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608486857592682986:2203][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486857592683042:2203], cookie# 1 2026-02-19T08:14:53.220939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608486857592682986:2203][/dc-1] Sync cookie mismatch: sender# [1:7608486857592683042:2203], cookie# 1, current cookie# 0 2026-02-19T08:14:53.220992Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608486857592682826:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:14:53.230887Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608486857592682826:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608486857592682986:2203] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:14:53.231029Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608486857592682826:2119], cacheItem# { Subscriber: { Subscriber: [1:7608486857592682986:2203] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:14:53.233737Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608486861887650410:2270], recipient# [1:7608486861887650409:2269], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:14:53.233824Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486861887650409:2269] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:53.267666Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486861887650409:2269] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:14:53.271864Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486861887650409:2269] Handle 
TEvDescribeSchemeResult Forward to# [1:7608486861887650408:2268] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:53.275321Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486853297715507:2106] Handle TEvProposeTransaction 2026-02-19T08:14:53.275359Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486853297715507:2106] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:14:53.275442Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486853297715507:2106] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608486861887650415:2274] 2026-02-19T08:14:53.419349Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486861887650415:2274] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-02-19T08:14:53.419422Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486861887650415:2274] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:53.419496Z node 1 :TX_PROXY DEBUG: schemereq.c ... 
meshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:14:57.654420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:14:57.654433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-02-19T08:14:57.654460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:14:57.654547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:14:57.654558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-02-19T08:14:57.654593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-02-19T08:14:57.655284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2026-02-19T08:14:57.655318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2026-02-19T08:14:57.655408Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:14:57.655430Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:14:57.655568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-02-19T08:14:57.656552Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7608486881677361572:2091] Handle TEvNavigate describe path /dc-1 2026-02-19T08:14:57.656574Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7608486881677362227:2520] HANDLE EvNavigateScheme /dc-1 2026-02-19T08:14:57.656661Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608486881677361612:2103], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:14:57.656724Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7608486881677361694:2125][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7608486881677361612:2103], cookie# 4 2026-02-19T08:14:57.656774Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][2:7608486881677361718:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7608486881677361715:2125], cookie# 4 2026-02-19T08:14:57.656790Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7608486881677361719:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7608486881677361716:2125], cookie# 4 2026-02-19T08:14:57.656803Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7608486881677361722:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7608486881677361717:2125], cookie# 4 2026-02-19T08:14:57.656823Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7608486881677361357:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7608486881677361718:2125], cookie# 4 2026-02-19T08:14:57.656844Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7608486881677361360:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7608486881677361719:2125], cookie# 4 2026-02-19T08:14:57.656859Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7608486881677361363:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7608486881677361722:2125], cookie# 4 2026-02-19T08:14:57.656904Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7608486881677361718:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7608486881677361357:2049], cookie# 4 2026-02-19T08:14:57.656926Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7608486881677361719:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7608486881677361360:2052], cookie# 4 2026-02-19T08:14:57.656941Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7608486881677361722:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7608486881677361363:2055], cookie# 4 2026-02-19T08:14:57.656990Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7608486881677361694:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7608486881677361715:2125], cookie# 4 2026-02-19T08:14:57.657008Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7608486881677361694:2125][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:14:57.657029Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7608486881677361694:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7608486881677361716:2125], cookie# 4 2026-02-19T08:14:57.657047Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7608486881677361694:2125][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:14:57.657081Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7608486881677361694:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7608486881677361717:2125], cookie# 4 2026-02-19T08:14:57.657099Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: 
[main][2:7608486881677361694:2125][/dc-1] Sync cookie mismatch: sender# [2:7608486881677361717:2125], cookie# 4, current cookie# 0 2026-02-19T08:14:57.657133Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7608486881677361612:2103], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:14:57.657190Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7608486881677361612:2103], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7608486881677361694:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1771488897647 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:14:57.657252Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608486881677361612:2103], cacheItem# { Subscriber: { Subscriber: [2:7608486881677361694:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1771488897647 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-02-19T08:14:57.657371Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608486881677362228:2521], recipient# [2:7608486881677362227:2520], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:14:57.657396Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7608486881677362227:2520] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:57.657453Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7608486881677362227:2520] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:14:57.657933Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7608486881677362227:2520] Handle TEvDescribeSchemeResult Forward to# [2:7608486881677362226:2519] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1771488897647 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> KqpIndexes::DuplicateUpsertInterleave-UseStreamIndex [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddStream [GOOD] >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink-UseStreamIndex >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |90.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |90.8%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> CheckIntegrityBlock42::DataOk [GOOD] >> CheckIntegrityBlock42::DataOkAdditionalEqualParts >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 |90.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} >> CheckIntegrityBlock42::PlacementWrongDisks [GOOD] >> CheckIntegrityMirror3dc::DataErrorOneCopy >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring >> THiveTest::TestDownCounterDeleteNode [GOOD] >> TColumnShardTestSchema::ProposeResultNotViaPipe >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] >> TColumnShardTestSchema::HotTiers >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> CheckIntegrityBlock42::DataOkAdditionalEqualParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn >> CheckIntegrityMirror3dc::DataErrorOneCopy [GOOD] >> CheckIntegrityMirror3dc::DataErrorManyCopies ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] Test command err: 2026-02-19T08:14:58.254843Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.257055Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.263732Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.263858Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.266081Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.269176Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.269274Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); 
sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.269437Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.272236Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.279739Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.279859Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.279908Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.281432Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.281515Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.281584Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.281682Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.284282Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.285947Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.286031Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.288495Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) 
Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.288590Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.288663Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.290915Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.291517Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.292502Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.293303Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ynks/0051ab/r3tmp/tmpKql0ml/pdisk_1.dat Formatting PDisk with guid1 9609391493768770024 Creating PDisk with guid2 16459637873924242344 Creating pdisk 2026-02-19T08:14:59.618068Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:583} PDiskId# 1001 Can't start due to a guid error expected# 16459637873924242344 on-disk# 9609391493768770024 PDiskId# 1001 2026-02-19T08:14:59.630718Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [8b41b9ac8186ade8] bootstrap ActorId# [1:1079:3024] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:357:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-02-19T08:14:59.630950Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631002Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631034Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631066Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631094Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 4 part# 4 situation# 
ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631125Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631154Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631181Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631209Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631237Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631269Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631300Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631329Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631359Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631389Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631418Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631445Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631472Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8b41b9ac8186ade8] Id# [72057594037932033:2:8:0:0:357:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:14:59.631510Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [8b41b9ac8186ade8] restore Id# [72057594037932033:2:8:0:0:357:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2026-02-19T08:14:59.631598Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strateg ... 
rollerConfigResponse# NKikimrBlobStorage.TEvControllerConfigResponse Response { Status { Success: true } Success: true ConfigTxSeqNo: 5 } Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: Initial DiskSpace: Green Replicated: false UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 1494791997772109972 ReplicationProgress: nan ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 1494791997772109972 GroupSizeInUnits: 0 ... unblocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON 2026-02-19T08:15:02.888668Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1966: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON LOCAL RECOVERY SUCCEEDED Marker# BSVS29 2026-02-19T08:15:02.927912Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2134: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON SYNC GUID RECOVERY SUCCEEDED Marker# BSVS31 2026-02-19T08:15:02.928023Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1845: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON IS UP AND RUNNING Marker# BSVS28 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IncarnationGuid: 7591946001844094306 InstanceGuid: 1494791997772109972 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 7 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 1623217112207103264 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 6 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 6791729481784707724 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 6 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 9505861673938687476 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 6 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 17106383763962829302 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 4 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 6 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 
6650550862876692562 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 5 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 2 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 7893980385120771350 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 6 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 2 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 5220472786528757894 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 7 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 8543374890683962395 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 1623217112207103264 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 6791729481784707724 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 9505861673938687476 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 17106383763962829302 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 4 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 6650550862876692562 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 5 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 7893980385120771350 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 6 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 5220472786528757894 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 7 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 8543374890683962395 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 1623217112207103264 AvailableSize: 
8556380160 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 6791729481784707724 AvailableSize: 8556380160 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 9505861673938687476 AvailableSize: 8556380160 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 17106383763962829302 AvailableSize: 8556380160 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 4 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 6650550862876692562 AvailableSize: 8556380160 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 5 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 7893980385120771350 AvailableSize: 8556380160 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 6 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 5220472786528757894 AvailableSize: 8556380160 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 7 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 8543374890683962395 AvailableSize: 8556380160 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 11175763238511507339 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 17177002963265937417 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } ReadThroughput: 0 WriteThroughput: 0 InstanceGuid: 11175763238511507339 GroupSizeInUnits: 0 
Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } ReadThroughput: 0 WriteThroughput: 0 InstanceGuid: 17177002963265937417 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 11175763238511507339 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 17177002963265937417 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 1494791997772109972 AvailableSize: 34225520640 GroupSizeInUnits: 2 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN >> TColumnShardTestSchema::ProposeResultNotViaPipe [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |90.9%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] Test command err: 2026-02-19T08:15:00.471289Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.473631Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.478219Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.478351Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.479793Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) 
Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.482355Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.482477Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.482599Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.485329Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.486986Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.487122Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.487194Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.489145Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.489237Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.489298Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.489414Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.491886Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.493603Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 
VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.493751Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.496423Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.496542Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.496639Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.499046Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.499630Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.500895Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:00.502037Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ynks/0051a9/r3tmp/tmpDErgsY/pdisk_1.dat 2026-02-19T08:15:01.529330Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [695c02aa31dd4562] bootstrap ActorId# [1:1078:3024] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1365:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-02-19T08:15:01.529560Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529613Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529642Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529672Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# 
[72057594037932033:2:8:0:0:1365:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529698Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529755Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529781Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529808Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529832Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529859Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529887Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529916Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529944Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.529973Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.530003Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.530033Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.530062Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.530090Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:01.530145Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [695c02aa31dd4562] restore Id# [72057594037932033:2:8:0:0:1365:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2026-02-19T08:15:01.530239Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [695c02aa31dd4562] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1365:1] Marker# BPG33 
2026-02-19T08:15:01.530313Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [695c02aa31dd4562] Sending missing VPut part# 0 to# 0 blob Id# ... 1234:2:0:0:0:5:1] Marker# BPG32 2026-02-19T08:15:04.421682Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:1224:3068] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-02-19T08:15:04.428737Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [db38e40e5c321908] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-02-19T08:15:04.428939Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [db38e40e5c321908] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2026-02-19T08:15:04.429076Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [db38e40e5c321908] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-02-19T08:15:04.429894Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:15:04.429952Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2026-02-19T08:15:04.430094Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2026-02-19T08:15:04.430681Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/ynks/0051a9/r3tmp/tmphj8FMJ//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2026-02-19T08:15:04.431799Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2026-02-19T08:15:04.431850Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:15:04.438663Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:1235:2133] Create Queue# [3:1237:2134] targetNodeId# 2 Marker# DSP01 2026-02-19T08:15:04.438888Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:1235:2133] Create Queue# [3:1238:2135] targetNodeId# 2 Marker# DSP01 2026-02-19T08:15:04.439030Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:1235:2133] Create Queue# [3:1239:2136] targetNodeId# 2 Marker# DSP01 2026-02-19T08:15:04.439380Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:1235:2133] Create Queue# [3:1240:2137] targetNodeId# 2 Marker# DSP01 2026-02-19T08:15:04.439508Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:1235:2133] Create Queue# [3:1241:2138] targetNodeId# 2 Marker# DSP01 2026-02-19T08:15:04.439642Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:1235:2133] Create Queue# 
[3:1242:2139] targetNodeId# 2 Marker# DSP01 2026-02-19T08:15:04.439779Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:1235:2133] Create Queue# [3:1243:2140] targetNodeId# 2 Marker# DSP01 2026-02-19T08:15:04.439819Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:15:04.441825Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:04.442142Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:04.442247Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:04.442487Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:04.442574Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:04.442657Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:04.442722Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:04.442757Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-02-19T08:15:04.442800Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork 
Marker# DSP15 2026-02-19T08:15:04.442972Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:150: [22f083b38c56d147] bootstrap ActorId# [3:1244:2141] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2026-02-19T08:15:04.443031Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:111: [22f083b38c56d147] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:2:0:0:0] node# 2 Marker# DSPB03 2026-02-19T08:15:04.443216Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:1237:2134] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 4320961626353636811 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2026-02-19T08:15:04.444638Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:43: [22f083b38c56d147] Handle TEvVBlockResult status# OK From# [82000002:2:0:0:0] NodeId# 2 Marker# DSPB01 2026-02-19T08:15:04.444710Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:100: [22f083b38c56d147] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2026-02-19T08:15:04.445114Z node 3 :BS_PROXY INFO: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2026-02-19T08:15:04.445394Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2026-02-19T08:15:04.445864Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [a81f1bc553f4d9cc] bootstrap ActorId# [2:1245:3078] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2026-02-19T08:15:04.446030Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a81f1bc553f4d9cc] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:04.446099Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [a81f1bc553f4d9cc] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-02-19T08:15:04.446165Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a81f1bc553f4d9cc] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2026-02-19T08:15:04.446222Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a81f1bc553f4d9cc] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2026-02-19T08:15:04.446395Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:1224:3068] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-02-19T08:15:04.446740Z node 2 :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:581: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2026-02-19T08:15:04.447089Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:265: [a81f1bc553f4d9cc] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-02-19T08:15:04.447197Z node 2 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [a81f1bc553f4d9cc] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2026-02-19T08:15:04.447262Z node 2 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:499: [a81f1bc553f4d9cc] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-02-19T08:15:04.447788Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:1237:2134] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken [GOOD] >> CheckIntegrityBlock42::DataOkErasureFiveParts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 4882559894479543907 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2026-02-19T08:15:02.427116Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2026-02-19T08:15:02.427482Z 1 
00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2026-02-19T08:15:02.562816Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestDownCounterDeleteNode [GOOD] Test command err: 2026-02-19T08:13:39.847735Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:39.898592Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:39.899667Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:39.899728Z node 1 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:0:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:39.899786Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:39.900097Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-02-19T08:13:39.901389Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:301} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DDisk# false VDiskServiceId# [1:5980451682:0] 2026-02-19T08:13:39.901467Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:39.902422Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:78:2078] ControllerId# 72057594037932033 2026-02-19T08:13:39.902471Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:39.902584Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:39.902704Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} 
StartRequestReportingThrottler 2026-02-19T08:13:39.920344Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:39.920402Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:39.924182Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:86:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.924367Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:87:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.924508Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:88:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.924641Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:89:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.924803Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:90:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.924934Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:91:2088] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.925102Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:92:2089] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.925140Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:39.925239Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:78:2078] 2026-02-19T08:13:39.925285Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:78:2078] 2026-02-19T08:13:39.925337Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:39.925402Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:39.926438Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:39.926536Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:39.931119Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:39.931624Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:39.931683Z node 2 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:1:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:39.931734Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# 
DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:39.931969Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:39.932907Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:103:2077] ControllerId# 72057594037932033 2026-02-19T08:13:39.932946Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:39.933027Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:39.933162Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:39.942314Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:39.942467Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:39.942643Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:39.945739Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:39.946282Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:39.946338Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:2:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:39.946391Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:39.946597Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:39.947445Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:115:2077] ControllerId# 72057594037932033 2026-02-19T08:13:39.947490Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:39.947557Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:39.947672Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:39.956896Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:39.956958Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:39.958904Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:122:2081] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.959044Z node 3 :BS_PROXY 
DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:123:2082] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.959170Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:124:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.959283Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:125:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.959466Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:126:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.959614Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:127:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.959747Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:114:2076] Create Queue# [3:128:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:39.959789Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:39.959865Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:115:2077] 2026-02-19T08:13:39.959896Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:115:2077] 2026-02-19T08:13:39.959939Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:39.959980Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:39.960454Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:39.960620Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:40.004875Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:78:2078] 2026-02-19T08:13:40.004961Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:40.017585Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 ... 
Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.221916Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.222020Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-02-19T08:15:03.222106Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2026-02-19T08:15:03.222143Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:575: TClient[72057594037936131] schedule retry [30:199:2162] 2026-02-19T08:15:03.255198Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [31:53:2064] 2026-02-19T08:15:03.255299Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [31:53:2064] 2026-02-19T08:15:03.255481Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:03.255557Z node 31 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 31 selfDC: 2 leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:15:03.255781Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:15:03.256231Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-02-19T08:15:03.256313Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-02-19T08:15:03.256383Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-02-19T08:15:03.256685Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.256776Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.256827Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.256988Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2026-02-19T08:15:03.257076Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, check reconnect [31:53:2064] 2026-02-19T08:15:03.257119Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:575: TClient[72057594037936129] schedule retry [31:53:2064] 2026-02-19T08:15:03.267624Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client 
retry [30:77:2064] 2026-02-19T08:15:03.267721Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [30:77:2064] 2026-02-19T08:15:03.267878Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:03.267942Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 30 selfDC: 1 leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:15:03.268159Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:15:03.268283Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-02-19T08:15:03.268368Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-02-19T08:15:03.268421Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-02-19T08:15:03.268498Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.268588Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.268632Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.268716Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2026-02-19T08:15:03.268782Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, check reconnect [30:77:2064] 2026-02-19T08:15:03.268820Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:575: TClient[72057594037936129] schedule retry [30:77:2064] 2026-02-19T08:15:03.352031Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936131] client retry [30:199:2162] 2026-02-19T08:15:03.352120Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936131] lookup [30:199:2162] 2026-02-19T08:15:03.352238Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037936131 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:03.352304Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 30 selfDC: 1 leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936131 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:15:03.352538Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 
72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:15:03.352691Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:15:03.352769Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:15:03.352815Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:15:03.352873Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.352952Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.353002Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:15:03.353090Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-02-19T08:15:03.353178Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2026-02-19T08:15:03.353219Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:575: TClient[72057594037936131] schedule retry [30:199:2162] 2026-02-19T08:15:03.423910Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} queued, type NKikimr::NHive::TTxDeleteNode 2026-02-19T08:15:03.424052Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:15:03.424372Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} hope 1 -> done Change{10, redo 64b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-02-19T08:15:03.424485Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:15:03.425421Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [30:537:2318] 2026-02-19T08:15:03.425512Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [30:537:2318] 2026-02-19T08:15:03.425653Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [30:452:2266] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:03.429957Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 30 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 1 localDc: 1 other: 0 disallowed: 0 tabletId: 72057594037927937 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [30:452:2266] 2026-02-19T08:15:03.430326Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [30:537:2318] 2026-02-19T08:15:03.430620Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [30:537:2318] 2026-02-19T08:15:03.430732Z 
node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [30:537:2318] 2026-02-19T08:15:03.430861Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594037927937]::SendEvent [30:537:2318] 2026-02-19T08:15:03.431092Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [30:537:2318] 2026-02-19T08:15:03.431350Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [30:537:2318] 2026-02-19T08:15:03.431439Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [30:537:2318] 2026-02-19T08:15:03.431503Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037927937] push event to server [30:537:2318] 2026-02-19T08:15:03.431589Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [30:537:2318] 2026-02-19T08:15:03.431658Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [30:537:2318] 2026-02-19T08:15:03.431763Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [30:536:2317] EventType# 268830214 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] Test command err: Delete nodeId# 60 Add nodeId# 101 Disable nodeId# 69 Enable nodeId# 69 Disable nodeId# 2 Enable nodeId# 2 Pick Disable nodeId# 3 Delete nodeId# 55 Pick Pick Delete nodeId# 73 Delete nodeId# 14 Disable nodeId# 101 Disable nodeId# 63 Disable nodeId# 4 Disable nodeId# 1 Delete nodeId# 12 Disable nodeId# 59 Disable nodeId# 90 Enable nodeId# 101 Add nodeId# 102 Enable nodeId# 1 Pick Enable nodeId# 59 Delete nodeId# 90 Add nodeId# 103 Delete nodeId# 75 Add nodeId# 104 Add nodeId# 105 Delete nodeId# 67 Add nodeId# 106 Delete nodeId# 47 Enable nodeId# 4 Disable nodeId# 91 Enable nodeId# 91 Disable nodeId# 52 Add nodeId# 107 Pick Enable nodeId# 3 Disable nodeId# 8 Enable nodeId# 52 Disable nodeId# 46 Disable nodeId# 36 Pick Delete nodeId# 34 Pick Disable nodeId# 48 Add nodeId# 108 Pick Enable nodeId# 36 Disable nodeId# 1 Disable nodeId# 74 Delete nodeId# 70 Pick Delete nodeId# 83 Delete nodeId# 29 Pick Pick Enable nodeId# 8 Enable nodeId# 1 Disable nodeId# 69 Add nodeId# 109 Enable nodeId# 63 Delete nodeId# 82 Delete nodeId# 17 Enable nodeId# 74 Disable nodeId# 24 Pick Delete nodeId# 66 Pick Pick Disable nodeId# 98 Delete nodeId# 10 Pick Enable nodeId# 46 Enable nodeId# 69 Pick Enable nodeId# 98 Disable nodeId# 9 Add nodeId# 110 Disable nodeId# 72 Delete nodeId# 88 Delete nodeId# 26 Enable nodeId# 9 Delete nodeId# 84 Add nodeId# 111 Disable nodeId# 35 Enable nodeId# 24 Disable nodeId# 54 Disable nodeId# 85 Pick Disable nodeId# 41 Delete nodeId# 101 Disable nodeId# 27 Enable nodeId# 54 Disable nodeId# 18 Add nodeId# 112 Delete nodeId# 3 Enable nodeId# 85 Enable nodeId# 72 Disable nodeId# 52 Enable nodeId# 48 Delete nodeId# 11 Delete nodeId# 52 Delete nodeId# 86 Disable nodeId# 44 Delete nodeId# 45 Enable nodeId# 27 Add nodeId# 113 Delete nodeId# 33 Disable nodeId# 87 Disable nodeId# 21 Disable nodeId# 4 Disable nodeId# 28 Add nodeId# 114 Add nodeId# 115 Add nodeId# 116 Add nodeId# 117 Pick Enable nodeId# 44 Delete nodeId# 16 Add nodeId# 118 Disable nodeId# 114 Enable nodeId# 41 Add nodeId# 119 Enable nodeId# 28 Enable 
nodeId# 4 Pick Pick Pick Enable nodeId# 114 Delete nodeId# 35 Delete nodeId# 61 Add nodeId# 120 Delete nodeId# 2 Disable nodeId# 39 Pick Pick Delete nodeId# 44 Pick Pick Disable nodeId# 95 Disable nodeId# 59 Pick Pick Enable nodeId# 87 Enable nodeId# 39 Disable nodeId# 25 Pick Add nodeId# 121 Disable nodeId# 46 Disable nodeId# 112 Disable nodeId# 93 Pick Enable nodeId# 112 Disable nodeId# 63 Enable nodeId# 93 Delete nodeId# 40 Enable nodeId# 25 Pick Add nodeId# 122 Disable nodeId# 85 Pick Enable nodeId# 46 Disable nodeId# 31 Delete nodeId# 72 Add nodeId# 123 Enable nodeId# 21 Enable nodeId# 59 Add nodeId# 124 Pick Disable nodeId# 124 Pick Pick Enable nodeId# 85 Enable nodeId# 63 Enable nodeId# 31 Pick Pick Enable nodeId# 124 Disable nodeId# 87 Enable nodeId# 95 Add nodeId# 125 Enable nodeId# 18 Pick Delete nodeId# 58 Pick Pick Add nodeId# 126 Enable nodeId# 87 Add nodeId# 127 Pick Pick Delete nodeId# 87 Disable nodeId# 92 Pick Disable nodeId# 37 Delete nodeId# 31 Pick Pick Enable nodeId# 92 Disable nodeId# 120 Add nodeId# 128 Enable nodeId# 120 Delete nodeId# 57 Disable nodeId# 28 Enable nodeId# 28 Disable nodeId# 41 Add nodeId# 129 Pick Delete nodeId# 120 Enable nodeId# 37 Pick Disable nodeId# 74 Enable nodeId# 74 Delete nodeId# 97 Pick Pick Delete nodeId# 102 Disable nodeId# 13 Enable nodeId# 41 Add nodeId# 130 Pick Pick Pick Delete nodeId# 7 Enable nodeId# 13 Pick Disable nodeId# 46 Add nodeId# 131 Disable nodeId# 25 Add nodeId# 132 Enable nodeId# 25 Delete nodeId# 91 Disable nodeId# 4 Pick Enable nodeId# 4 Pick Pick Add nodeId# 133 Disable nodeId# 100 Disable nodeId# 109 Enable nodeId# 100 Add nodeId# 134 Delete nodeId# 8 Enable nodeId# 109 Delete nodeId# 65 Delete nodeId# 85 Disable nodeId# 32 Delete nodeId# 133 Add nodeId# 135 Pick Delete nodeId# 76 Add nodeId# 136 Delete nodeId# 122 Enable nodeId# 46 Delete nodeId# 113 Disable nodeId# 19 Disable nodeId# 9 Add nodeId# 137 Pick Disable nodeId# 130 Add nodeId# 138 Disable nodeId# 41 Pick Add nodeId# 139 Add nodeId# 140 Delete nodeId# 39 Disable nodeId# 136 Enable nodeId# 130 Pick Disable nodeId# 100 Pick Add nodeId# 141 Pick Disable nodeId# 53 Delete nodeId# 30 Disable nodeId# 130 Disable nodeId# 4 Pick Pick Disable nodeId# 105 Delete nodeId# 89 Delete nodeId# 54 Add nodeId# 142 Pick Delete nodeId# 121 Pick Delete nodeId# 53 Delete nodeId# 59 Add nodeId# 143 Disable nodeId# 126 Add nodeId# 144 Add nodeId# 145 Delete nodeId# 126 Add nodeId# 146 Add nodeId# 147 Enable nodeId# 4 Add nodeId# 148 Add nodeId# 149 Disable nodeId# 148 Pick Enable nodeId# 130 Pick Delete nodeId# 125 Add nodeId# 150 Delete nodeId# 38 Pick Add nodeId# 151 Delete nodeId# 64 Pick Delete nodeId# 140 Enable nodeId# 105 Add nodeId# 152 Enable nodeId# 41 Disable nodeId# 42 Disable nodeId# 144 Disable nodeId# 37 Enable nodeId# 19 Enable nodeId# 9 Delete nodeId# 106 Add nodeId# 153 Delete nodeId# 69 Delete nodeId# 51 Delete nodeId# 13 Pick Pick Add nodeId# 154 Add nodeId# 155 Enable nodeId# 144 Enable nodeId# 42 Disable nodeId# 50 Delete nodeId# 116 Enable nodeId# 37 Delete nodeId# 92 Enable nodeId# 100 Enable nodeId# 148 Delete nodeId# 4 Pick Disable nodeId# 18 Pick Delete nodeId# 5 Disable nodeId# 137 Delete nodeId# 81 Add nodeId# 156 Pick Enable nodeId# 137 Pick Enable nodeId# 18 Disable nodeId# 37 Disable nodeId# 148 Pick Pick Pick Add nodeId# 157 Delete nodeId# 152 Delete nodeId# 104 Add nodeId# 158 Disable nodeId# 129 Add nodeId# 159 Disable nodeId# 1 Add nodeId# 160 Pick Delete nodeId# 15 Pick Pick Enable nodeId# 136 Enable nodeId# 37 Pick Pick Pick Disable 
nodeId# 93 Add nodeId# 161 Disable nodeId# 135 Disable nodeId# 94 Pick Add nodeId# 162 Enable nodeId# 129 Pick Delete nodeId# 139 Delete nodeId# 145 Add nodeId# 163 Delete nodeId# 77 Disable nodeId# 95 Enable nodeId# 50 Disable nodeId# 129 Pick Add nodeId# 164 Delete nodeId# 105 Disable nodeId# 27 Pick Pick Enable nodeId# 32 Disable nodeId# 23 Enable nodeId# 129 Pick Delete nodeId# 95 Delete nodeId# 111 Add nodeId# 165 Pick Enable nodeId# 23 Enable nodeId# 94 Disable nodeId# 19 Disable nodeId# 149 Disable nodeId# 96 Disable nodeId# 147 Pick Pick Disable nodeId# 109 Add nodeId# 166 Enable nodeId# 147 Enable nodeId# 109 Enable nodeId# 96 Delete nodeId# 37 Enable nodeId# 149 Disable nodeId# 127 Disable nodeId# 147 Pick Add nodeId# 167 Delete nodeId# 147 Disable nodeId# 36 Delete nodeId# 96 Pick Add nodeId# 168 Disable nodeId# 134 Pick Enable nodeId# 135 Add nodeId# 169 Disable nodeId# 124 Pick Disable nodeId# 108 Disable nodeId# 28 Delete nodeId# 112 Pick Delete nodeId# 127 Pick Enable nodeId# 93 Disable nodeId# 169 Enable nodeId# 169 Delete nodeId# 166 Delete nodeId# 151 Pick Enable nodeId# 27 Disable nodeId# 146 Delete nodeId# 115 Pick Pick Delete nodeId# 21 Pick Disable nodeId# 25 Add nodeId# 170 Pick Delete nodeId# 132 Disable nodeId# 136 Disable nodeId# 68 Delete nodeId# 131 Disable nodeId# 158 Enable nodeId# 68 Pick Enable nodeId# 136 Pick Pick Disable nodeId# 153 Pick Add nodeId# 171 Add nodeId# 172 Disable nodeId# 109 Delete nodeId# 123 Disable nodeId# 150 Enable nodeId# 124 Delete nodeId# 142 Add nodeId# 173 Pick Delete nodeId# 56 Delete nodeId# 129 Enable nodeId# 146 Add nodeId# 174 Enable nodeId# 19 Disable nodeId# 32 Delete nodeId# 118 Delete nodeId# 22 Delete nodeId# 155 Enable nodeId# 153 Add nodeId# 175 Add nodeId# 176 Delete nodeId# 117 Enable nodeId# 108 Enable nodeId# 109 Pick Pick Disable nodeId# 163 Add nodeId# 177 Pick Pick Add nodeId# 178 Add nodeId# 179 Enable nodeId# 163 Enable nodeId# 28 Add nodeId# 180 Pick Enable nodeId# 148 Disable nodeId# 103 Add nodeId# 181 Add nodeId# 182 Enable nodeId# 25 Delete nodeId# 108 Disable nodeId# 156 Disable nodeId# 71 Delete nodeId# 9 Disable nodeId# 144 Delete nodeId# 144 Disable nodeId# 163 Enable nodeId# 150 Delete nodeId# 41 Enable nodeId# 158 Delete nodeId# 174 Delete nodeId# 48 Delete nodeId# 100 Pick Pick Delete nodeId# 161 Disable nodeId# 150 Enable nodeId# 1 Delete nodeId# 36 Disable nodeId# 114 Pick Add nodeId# 183 Add nodeId# 184 Disable nodeId# 63 Disable nodeId# 154 Delete nodeId# 124 Enable nodeId# 154 Add nodeId# 185 Pick Add nodeId# 186 Disable nodeId# 172 Add nodeId# 187 Disable nodeId# 128 Pick Enable nodeId# 150 Delete nodeId# 181 Disable nodeId# 143 Delete nodeId# 74 Disable nodeId# 135 Pick Disable nodeId# 177 Disable nodeId# 150 Add nodeId# 188 Pick Add nodeId# 189 Delete nodeId# 28 Pick Delete nodeId# 146 Add nodeId# 190 Delete nodeId# 158 Disable nodeId# 167 Disable nodeId# 170 Delete nodeId# 176 Add nodeId# 191 Delete nodeId# 99 Delete nodeId# 20 Pick Disable nodeId# 183 Pick Add nodeId# 192 Add nodeId# 193 Add nodeId# 194 Pick Pick Delete nodeId# 1 Add nodeId# 195 Pick Disable nodeId# 149 Delete nodeId# 163 Disable nodeId# 98 Add nodeId# 196 Delete nodeId# 194 Enable nodeId# 170 Disable nodeId# 68 Add nodeId# 197 Disable nodeId# 109 Pick Pick Pick Delete nodeId# 46 Add nodeId# 198 Enable nodeId# 71 Pick Pick Enable nodeId# 68 Add nodeId# 199 Delete nodeId# 190 Pick Pick Add nodeId# 200 Delete nodeId# 178 Disable nodeId# 200 Enable nodeId# 128 Disable nodeId# 27 Pick Delete nodeId# 32 Pick Pick Add nodeId# 
201 Disable nodeId# 128 Delete nodeId# 199 Enable nodeId# 103 Enable nodeId# 128 Disable nodeId# 79 Enable nodeId# 98 Disable nodeId# 130 Add nodeId# 202 Pick Disable nodeId# 94 Delete nodeId# 23 Delete nodeId# 201 Add nodeId# 203 Pick Delete nodeId# 93 Disable nodeId# 103 Pick Enable nodeId# 135 Disable nodeId# 128 Disable nodeId# 62 Delete nodeId# 171 Delete nodeId# 187 Pick Enable nodeId# 94 Disable nodeId# 110 Delete nodeId# 27 Pick Pick Pick Disable nodeId# 107 Disable nodeId# 6 Delete nodeId# 177 Disable nodeId# 153 Disable nodeId# 179 Add nodeId# 204 Disable nodeId# 137 Pick Add nodeId# 205 Delete nodeId# 107 Disable nodeId# 180 Enable nodeId# 156 Disable nodeId# 49 Enable nodeId# 109 Enable nodeId# 172 Disable nodeId# 192 Delete nodeId# 137 Pick Delete nodeId# 141 Enable nodeId# 134 Pick Delete nodeId# 191 Enable nodeId# 167 Pick Add nodeId# 206 Add nodeId# 207 Pick Enable nodeId# 103 Disable nodeId# 159 Add nodeId# 208 Add nodeId# 209 Delete nodeId# 103 Delete nodeId# 175 Pick Pick Disable nodeId# 119 Delete nodeId# 193 Add nodeId# 210 Pick Delete nodeId# 180 Add nodeId# 211 Enable nodeId# 159 Pick Delete nodeId# 50 Enable nodeId# 63 Add nodeId# 212 Add nodeId# 213 Delete nodeId# 49 Pick Enable nodeId# 79 Add nodeId# 214 Delete nodeId# 196 Delete nodeId# 209 Pick Pick Delete nodeId# 204 Add nodeId# 215 Enable nodeId# 143 Disable nodeId# 184 Pick Delete nodeId# 114 Pick Pick Disable nodeId# 211 Delete nodeId# 19 Enable nodeId# 6 Pick Enable nodeId# 119 Enable nodeId# 110 Enable nodeId# 184 Enable nodeId# 192 Enable nodeId# 183 Enable nodeId# 150 Add nodeId# 216 Delete nodeId# 162 Pick Enable nodeId# 200 Enable nodeId# 128 Delete nodeId# 213 Delete nodeId# 205 Add nodeId# 217 Add nodeId# 218 Enable nodeId# 130 Disable nodeId# 185 Pick Pick Disable nodeId# 156 Enable nodeId# 185 Add nodeId# 219 Delete nodeId# 206 Pick Delete nodeId# 150 Add nodeId# 220 Disable nodeId# 98 Pick Add nodeId# 221 Enable nodeId# 156 Pick Add nodeId# 222 Delete nodeId# 182 Disable nodeId# 110 Pick Delete nodeId# 173 Delete nodeId# 98 Disable nodeId# 216 Pick Add nodeId# 223 Add nodeId# 224 Delete nodeId# 138 Delete nodeId# 197 Add nodeId# 225 Add nodeId# 226 Add nodeId# 227 Disable nodeId# 134 Enable nodeId# 211 Enable nodeId# 149 Add nodeId# 228 Pick Disable nodeId# 186 Add nodeId# 229 Add nodeId# 230 Add nodeId# 231 Disable nodeId# 228 Disable nodeId# 185 Add nodeId# 232 Disable nodeId# 211 Add nodeId# 233 Pick Disable nodeId# 229 Enable nodeId# 211 Pick Pick Delete nodeId# 212 Enable nodeId# 228 Delete nodeId# 208 Pick Delete nodeId# 210 Enable nodeId# 110 Pick Enable nodeId# 186 Pick Enable nodeId# 153 Enable nodeId# 216 Pick Enable nodeId# 229 Enable nodeId# 62 Delete nodeId# 18 Disable nodeId# 24 Pick Disable nodeId# 149 Pick Delete nodeId# 110 Disable nodeId# 42 Disable nodeId# 68 Delete nodeId# 220 Enable nodeId# 1 ... 
98 Add nodeId# 20264 Pick Pick Delete nodeId# 20038 Enable nodeId# 20040 Enable nodeId# 20251 Delete nodeId# 20159 Pick Pick Delete nodeId# 20193 Disable nodeId# 20212 Pick Pick Disable nodeId# 20150 Disable nodeId# 20235 Pick Pick Pick Enable nodeId# 20155 Enable nodeId# 20150 Disable nodeId# 20261 Disable nodeId# 20191 Delete nodeId# 19955 Disable nodeId# 20258 Add nodeId# 20265 Add nodeId# 20266 Delete nodeId# 20266 Add nodeId# 20267 Disable nodeId# 20260 Enable nodeId# 20191 Add nodeId# 20268 Disable nodeId# 20224 Delete nodeId# 20238 Pick Pick Pick Delete nodeId# 20256 Pick Pick Pick Delete nodeId# 20205 Add nodeId# 20269 Pick Add nodeId# 20270 Add nodeId# 20271 Enable nodeId# 20254 Delete nodeId# 20184 Delete nodeId# 20243 Delete nodeId# 20222 Delete nodeId# 20100 Delete nodeId# 20165 Delete nodeId# 20042 Pick Enable nodeId# 20235 Delete nodeId# 20173 Disable nodeId# 20213 Delete nodeId# 20235 Add nodeId# 20272 Pick Delete nodeId# 20098 Enable nodeId# 20261 Enable nodeId# 20212 Delete nodeId# 20096 Pick Pick Add nodeId# 20273 Pick Add nodeId# 20274 Enable nodeId# 20224 Enable nodeId# 20260 Disable nodeId# 20224 Delete nodeId# 20273 Pick Disable nodeId# 20267 Pick Add nodeId# 20275 Enable nodeId# 20258 Pick Delete nodeId# 20263 Pick Enable nodeId# 20130 Pick Pick Disable nodeId# 20258 Add nodeId# 20276 Enable nodeId# 20224 Pick Pick Pick Add nodeId# 20277 Delete nodeId# 20150 Disable nodeId# 20259 Pick Delete nodeId# 20040 Delete nodeId# 20277 Delete nodeId# 20223 Disable nodeId# 20255 Disable nodeId# 20225 Add nodeId# 20278 Add nodeId# 20279 Enable nodeId# 20255 Add nodeId# 20280 Enable nodeId# 20258 Add nodeId# 20281 Delete nodeId# 20105 Disable nodeId# 20226 Add nodeId# 20282 Add nodeId# 20283 Delete nodeId# 20262 Disable nodeId# 20271 Delete nodeId# 20149 Disable nodeId# 20179 Delete nodeId# 20251 Delete nodeId# 20274 Enable nodeId# 20179 Enable nodeId# 20226 Add nodeId# 20284 Disable nodeId# 20209 Add nodeId# 20285 Pick Disable nodeId# 20221 Add nodeId# 20286 Add nodeId# 20287 Disable nodeId# 20278 Disable nodeId# 20261 Enable nodeId# 20267 Disable nodeId# 20218 Pick Add nodeId# 20288 Disable nodeId# 20128 Disable nodeId# 20226 Delete nodeId# 20284 Add nodeId# 20289 Delete nodeId# 20067 Pick Delete nodeId# 20259 Disable nodeId# 20198 Pick Pick Disable nodeId# 20155 Disable nodeId# 20232 Enable nodeId# 20271 Disable nodeId# 20268 Pick Add nodeId# 20290 Pick Delete nodeId# 20271 Delete nodeId# 20198 Delete nodeId# 20162 Disable nodeId# 20280 Enable nodeId# 20128 Delete nodeId# 20270 Add nodeId# 20291 Delete nodeId# 20268 Disable nodeId# 20258 Delete nodeId# 20185 Enable nodeId# 20226 Disable nodeId# 20272 Pick Pick Disable nodeId# 20130 Delete nodeId# 20225 Pick Pick Enable nodeId# 20272 Pick Add nodeId# 20292 Disable nodeId# 20247 Add nodeId# 20293 Delete nodeId# 20213 Disable nodeId# 20212 Pick Pick Delete nodeId# 20231 Delete nodeId# 20245 Enable nodeId# 20130 Add nodeId# 20294 Pick Disable nodeId# 20240 Add nodeId# 20295 Delete nodeId# 20275 Pick Enable nodeId# 20218 Add nodeId# 20296 Delete nodeId# 20209 Add nodeId# 20297 Delete nodeId# 20283 Delete nodeId# 20241 Disable nodeId# 20286 Enable nodeId# 20278 Disable nodeId# 20177 Add nodeId# 20298 Delete nodeId# 20265 Pick Pick Add nodeId# 20299 Delete nodeId# 20257 Disable nodeId# 20293 Pick Pick Pick Disable nodeId# 20156 Delete nodeId# 20191 Disable nodeId# 20219 Enable nodeId# 20258 Disable nodeId# 20267 Disable nodeId# 20179 Enable nodeId# 20293 Disable nodeId# 20253 Add nodeId# 20300 Enable nodeId# 20221 Pick Enable nodeId# 
20261 Add nodeId# 20301 Add nodeId# 20302 Enable nodeId# 20177 Enable nodeId# 20240 Delete nodeId# 20300 Delete nodeId# 20226 Disable nodeId# 20217 Enable nodeId# 20217 Pick Delete nodeId# 20239 Enable nodeId# 20267 Pick Enable nodeId# 20286 Disable nodeId# 20264 Delete nodeId# 20179 Pick Delete nodeId# 20217 Enable nodeId# 20247 Delete nodeId# 20218 Delete nodeId# 20297 Delete nodeId# 20292 Pick Disable nodeId# 20267 Add nodeId# 20303 Pick Disable nodeId# 20302 Add nodeId# 20304 Disable nodeId# 20234 Pick Add nodeId# 20305 Disable nodeId# 20286 Enable nodeId# 20286 Add nodeId# 20306 Pick Pick Disable nodeId# 20130 Add nodeId# 20307 Disable nodeId# 20057 Disable nodeId# 20230 Disable nodeId# 20305 Enable nodeId# 20219 Delete nodeId# 20302 Delete nodeId# 20024 Pick Add nodeId# 20308 Delete nodeId# 20296 Enable nodeId# 20130 Disable nodeId# 20286 Delete nodeId# 20295 Add nodeId# 20309 Add nodeId# 20310 Pick Enable nodeId# 20267 Pick Enable nodeId# 20280 Pick Pick Enable nodeId# 20286 Delete nodeId# 20299 Pick Add nodeId# 20311 Pick Pick Enable nodeId# 20264 Enable nodeId# 20156 Delete nodeId# 20288 Enable nodeId# 20234 Add nodeId# 20312 Pick Enable nodeId# 20155 Disable nodeId# 20228 Delete nodeId# 20308 Add nodeId# 20313 Enable nodeId# 20212 Enable nodeId# 20230 Delete nodeId# 20234 Add nodeId# 20314 Pick Pick Enable nodeId# 20232 Enable nodeId# 20253 Disable nodeId# 20280 Disable nodeId# 20230 Enable nodeId# 20305 Disable nodeId# 20146 Enable nodeId# 20228 Add nodeId# 20315 Delete nodeId# 20219 Add nodeId# 20316 Add nodeId# 20317 Pick Delete nodeId# 20122 Add nodeId# 20318 Disable nodeId# 20282 Disable nodeId# 20128 Disable nodeId# 20267 Enable nodeId# 20057 Disable nodeId# 20221 Disable nodeId# 20293 Delete nodeId# 20180 Pick Add nodeId# 20319 Enable nodeId# 20221 Enable nodeId# 20230 Delete nodeId# 20269 Pick Disable nodeId# 20279 Delete nodeId# 20128 Disable nodeId# 20260 Pick Enable nodeId# 20280 Enable nodeId# 20260 Disable nodeId# 20316 Delete nodeId# 20304 Delete nodeId# 20258 Enable nodeId# 20279 Delete nodeId# 20310 Pick Add nodeId# 20320 Add nodeId# 20321 Add nodeId# 20322 Pick Delete nodeId# 20255 Enable nodeId# 20146 Add nodeId# 20323 Delete nodeId# 20323 Disable nodeId# 20317 Add nodeId# 20324 Delete nodeId# 20298 Disable nodeId# 20311 Pick Pick Enable nodeId# 20293 Disable nodeId# 20315 Enable nodeId# 20267 Add nodeId# 20325 Pick Delete nodeId# 20285 Add nodeId# 20326 Delete nodeId# 20301 Enable nodeId# 20316 Pick Add nodeId# 20327 Pick Delete nodeId# 20260 Add nodeId# 20328 Add nodeId# 20329 Disable nodeId# 20267 Pick Add nodeId# 20330 Pick Enable nodeId# 20315 Enable nodeId# 20311 Pick Add nodeId# 20331 Add nodeId# 20332 Delete nodeId# 20232 Disable nodeId# 20315 Add nodeId# 20333 Enable nodeId# 20282 Enable nodeId# 20267 Pick Delete nodeId# 20324 Disable nodeId# 20267 Pick Add nodeId# 20334 Add nodeId# 20335 Add nodeId# 20336 Enable nodeId# 20315 Add nodeId# 20337 Disable nodeId# 20326 Disable nodeId# 20130 Disable nodeId# 20303 Disable nodeId# 20325 Pick Enable nodeId# 20130 Disable nodeId# 20314 Add nodeId# 20338 Enable nodeId# 20303 Add nodeId# 20339 Pick Pick Delete nodeId# 20282 Pick Enable nodeId# 20326 Delete nodeId# 20281 Disable nodeId# 20276 Add nodeId# 20340 Pick Disable nodeId# 20247 Pick Delete nodeId# 20313 Add nodeId# 20341 Add nodeId# 20342 Enable nodeId# 20317 Delete nodeId# 20330 Disable nodeId# 20335 Enable nodeId# 20247 Pick Enable nodeId# 20276 Add nodeId# 20343 Enable nodeId# 20325 Disable nodeId# 20334 Pick Enable nodeId# 20334 Disable nodeId# 20286 
Delete nodeId# 20254 Disable nodeId# 20336 Enable nodeId# 20286 Pick Enable nodeId# 20336 Delete nodeId# 20057 Pick Pick Enable nodeId# 20267 Delete nodeId# 20336 Enable nodeId# 20314 Pick Disable nodeId# 20146 Enable nodeId# 20146 Pick Add nodeId# 20344 Enable nodeId# 20335 Pick Delete nodeId# 20333 Disable nodeId# 20325 Disable nodeId# 20311 Pick Pick Add nodeId# 20345 Delete nodeId# 20156 Add nodeId# 20346 Enable nodeId# 20311 Enable nodeId# 20325 Pick Disable nodeId# 20331 Delete nodeId# 20343 Enable nodeId# 20331 Delete nodeId# 20338 Disable nodeId# 20319 Enable nodeId# 20319 Delete nodeId# 20261 Pick Disable nodeId# 20290 Pick Add nodeId# 20347 Delete nodeId# 20155 Pick Add nodeId# 20348 Disable nodeId# 20347 Disable nodeId# 20341 Enable nodeId# 20341 Enable nodeId# 20290 Add nodeId# 20349 Add nodeId# 20350 Disable nodeId# 20130 Disable nodeId# 20325 Delete nodeId# 20328 Enable nodeId# 20130 Pick Delete nodeId# 20325 Pick Pick Delete nodeId# 20347 Add nodeId# 20351 Delete nodeId# 20312 Add nodeId# 20352 Add nodeId# 20353 Disable nodeId# 20337 Add nodeId# 20354 Enable nodeId# 20337 Delete nodeId# 20327 Add nodeId# 20355 Delete nodeId# 20349 Delete nodeId# 20339 Pick Disable nodeId# 20353 Pick Disable nodeId# 20307 Disable nodeId# 20278 Delete nodeId# 20352 Add nodeId# 20356 Disable nodeId# 20344 Pick Disable nodeId# 20230 Pick Enable nodeId# 20278 Delete nodeId# 20287 Add nodeId# 20357 Disable nodeId# 20253 Enable nodeId# 20344 Enable nodeId# 20307 Enable nodeId# 20230 Disable nodeId# 20344 Delete nodeId# 20350 Delete nodeId# 20293 Pick Add nodeId# 20358 Disable nodeId# 20278 Pick Disable nodeId# 20264 Disable nodeId# 20112 Pick Delete nodeId# 20311 Pick Delete nodeId# 20224 Enable nodeId# 20344 Enable nodeId# 20353 Disable nodeId# 20332 Enable nodeId# 20112 Add nodeId# 20359 Enable nodeId# 20278 Pick Delete nodeId# 20294 Delete nodeId# 20272 Disable nodeId# 20280 Add nodeId# 20360 Pick Pick Disable nodeId# 20326 Disable nodeId# 20291 Add nodeId# 20361 Add nodeId# 20362 Disable nodeId# 20354 Disable nodeId# 20359 Enable nodeId# 20326 Enable nodeId# 20332 Add nodeId# 20363 Enable nodeId# 20253 Pick Enable nodeId# 20354 Add nodeId# 20364 Add nodeId# 20365 Add nodeId# 20366 Enable nodeId# 20291 Pick Enable nodeId# 20280 Delete nodeId# 20359 Pick Enable nodeId# 20264 Delete nodeId# 20340 Add nodeId# 20367 Delete nodeId# 20326 Pick Add nodeId# 20368 Delete nodeId# 20322 Pick Delete nodeId# 20280 Pick Disable nodeId# 20290 Pick Disable nodeId# 20334 Enable nodeId# 20290 Enable nodeId# 20334 Delete nodeId# 20240 Disable nodeId# 20355 Add nodeId# 20369 Delete nodeId# 20368 Delete nodeId# 20279 Add nodeId# 20370 Enable nodeId# 20355 Pick Delete nodeId# 20364 Disable nodeId# 20146 Add nodeId# 20371 Add nodeId# 20372 Add nodeId# 20373 Add nodeId# 20374 Pick Add nodeId# 20375 Disable nodeId# 20348 Add nodeId# 20376 Delete nodeId# 20358 Delete nodeId# 20361 Add nodeId# 20377 Enable nodeId# 20348 Disable nodeId# 20316 Pick Pick Pick Disable nodeId# 20307 Enable nodeId# 20316 Delete nodeId# 20376 Enable nodeId# 20146 Disable nodeId# 20318 Delete nodeId# 20289 Enable nodeId# 20307 Enable nodeId# 20318 Delete nodeId# 20348 Pick Pick Add nodeId# 20378 Pick Delete nodeId# 20378 Delete nodeId# 20146 Delete nodeId# 20306 Pick Disable nodeId# 20319 Disable nodeId# 20305 Pick Enable nodeId# 20305 Enable nodeId# 20319 Disable nodeId# 20290 Add nodeId# 20379 Delete nodeId# 20130 Delete nodeId# 20264 Disable nodeId# 20377 Add nodeId# 20380 Delete nodeId# 20331 Delete nodeId# 20369 Disable nodeId# 20366 Add 
nodeId# 20381 Add nodeId# 20382 Disable nodeId# 20112 Enable nodeId# 20377 Enable nodeId# 20290 Add nodeId# 20383 Disable nodeId# 20345 Add nodeId# 20384 Add nodeId# 20385 Add nodeId# 20386 Disable nodeId# 20335 Delete nodeId# 20379 Pick Delete nodeId# 20346 Enable nodeId# 20366 Enable nodeId# 20345 Pick Add nodeId# 20387 Add nodeId# 20388 Add nodeId# 20389 Delete nodeId# 20337 Disable nodeId# 20372 Disable nodeId# 20253 Disable nodeId# 20177 Add nodeId# 20390 Disable nodeId# 20286 Enable nodeId# 20112 Pick Enable nodeId# 20335 Enable nodeId# 20253 Delete nodeId# 20344 Delete nodeId# 20317 Add nodeId# 20391 Delete nodeId# 20342 Add nodeId# 20392 Pick Enable nodeId# 20372 Delete nodeId# 20291 Disable nodeId# 20307 Enable nodeId# 20177 Disable nodeId# 20309 Add nodeId# 20393 Disable nodeId# 20385 Pick Pick Pick Add nodeId# 20394 Disable nodeId# 20253 Disable nodeId# 20384 Add nodeId# 20395 Delete nodeId# 20278 Enable nodeId# 20385 Add nodeId# 20396 Disable nodeId# 20363 Enable nodeId# 20384 Add nodeId# 20397 Enable nodeId# 20309 Disable nodeId# 20321 Disable nodeId# 20381 Enable nodeId# 20363 Delete nodeId# 20353 Disable nodeId# 20392 Delete nodeId# 20381 Enable nodeId# 20253 Add nodeId# 20398 Enable nodeId# 20392 Add nodeId# 20399 Enable nodeId# 20307 Pick Delete nodeId# 20228 Disable nodeId# 20370 Enable nodeId# 20321 >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] Test command err: 2026-02-19T08:14:58.686355Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.688205Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.707498Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.707693Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.709449Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.710401Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.710502Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.711784Z node 1 
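Editor's note: the `TBlobStorageWardenTest::ObtainTenantKeyDifferentPin` stderr above is a long randomized trace of node-set operations built from just five keywords (`Add nodeId#`, `Delete nodeId#`, `Enable nodeId#`, `Disable nodeId#`, and bare `Pick`). When skimming such a trace it can help to fold it into per-operation counts and the final live/disabled node sets. The following is a minimal, hypothetical helper for doing that; it is not part of the YDB tooling and only assumes the token grammar visible in the log.

    import re

    # Hypothetical helper for skimming the node-op trace printed by the
    # nodewarden unittest stderr above; not part of the YDB repository.
    OP_RE = re.compile(r"(Add|Delete|Enable|Disable) nodeId# (\d+)|(Pick)")

    def summarize(trace: str):
        nodes, disabled, counts = set(), set(), {}
        for m in OP_RE.finditer(trace):
            op = m.group(1) or m.group(3)
            counts[op] = counts.get(op, 0) + 1
            if op == "Pick":
                continue
            node = int(m.group(2))
            if op == "Add":
                nodes.add(node)
            elif op == "Delete":
                nodes.discard(node)
                disabled.discard(node)
            elif op == "Disable":
                disabled.add(node)
            elif op == "Enable":
                disabled.discard(node)
        return counts, sorted(nodes), sorted(disabled)

    if __name__ == "__main__":
        counts, alive, disabled = summarize(open("test_err.txt").read())
        print(counts)
        print(f"{len(alive)} nodes alive, {len(disabled)} currently disabled")

The file name `test_err.txt` is a placeholder for wherever the test stderr was saved.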
:BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.716450Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.718027Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.718220Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ynks/0051ae/r3tmp/tmpuCOUTX/pdisk_1.dat 2026-02-19T08:14:58.808152Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.808242Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.808301Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.808348Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.808885Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.978677Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.979018Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.979134Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.979203Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal 
guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.979281Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.979736Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.989313Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.989435Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.989485Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.989562Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.989616Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:58.989673Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.005581Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.232012Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.232138Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.232201Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.232266Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) 
Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.232325Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.237041Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.237161Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.237213Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.237267Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.237329Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.240375Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.240601Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.240671Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.241066Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:15:01.241128Z ... 
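Editor's note: each of the repeated `BS_SYNCLOG WARN ... FULL_RECOVER(unequal guid)` lines above names a `sourceVDisk#` and `targetVDisk#` pair. A quick way to see how many distinct disk pairs triggered a full recovery on each node is to fold the log into a counter. A rough sketch under the assumption that the fields keep exactly the formatting shown above (a hypothetical helper, not YDB tooling):

    import re
    from collections import Counter

    # Hypothetical log-folding helper for the BS_SYNCLOG warnings shown above.
    LINE_RE = re.compile(
        r"node (\d+) :BS_SYNCLOG WARN: blobstorage_synclog\.cpp:\d+: "
        r"PDiskId# \d+ VDISK\[[^\]]+\]: \(\d+\) Handle\(TEvSyncLogRead\): "
        r"FULL_RECOVER\(unequal guid\); sourceVDisk# (\[[^\]]+\]) "
        r"targetVDisk# (\[[^\]]+\])"
    )

    def fold(log_text: str) -> Counter:
        pairs = Counter()
        for node, src, dst in LINE_RE.findall(log_text):
            pairs[(int(node), src, dst)] += 1
        return pairs

    if __name__ == "__main__":
        for (node, src, dst), n in fold(open("test_err.txt").read()).most_common(5):
            print(f"node {node}: {src} -> {dst} x{n}")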
roughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 0 } - Test case 2 - enable PreferInferredSettingsOverExplicit 2026-02-19T08:15:04.912829Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:171} Inferred PDisk slot count Path# SectorMap:TestInferPDiskSlotCountExplicitConfig:2400 SlotCount# 12 SlotSizeInUnits# 2 FromDriveSize# 2576980377600 FromUnitSize# 107374182400 FromMaxSlots# 16 2026-02-19T08:15:04.912900Z node 3 :BS_NODE DEBUG: {NW112@node_warden_impl.cpp:1300} SendChangeExpectedSlotCount from config notification PDiskId# 1001 ExpectedSlotCount# 12 SlotSizeInUnits# 2 2026-02-19T08:15:04.913064Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:208} CreateReqFromEv with sender ev# {EvChangeExpectedSlotCount ExpectedSlotCount# 12 SlotSizeInUnits# 2} Sender# 7 ReqId# 10010000006 PDiskId# 1001 2026-02-19T08:15:04.913125Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1001 ReqId# 2562560001874 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3612.576413 2026-02-19T08:15:04.913279Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1001 ReqId# 2562560001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3612.574320 2026-02-19T08:15:04.913388Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TChangeExpectedSlotCount OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-02-19T08:15:04.913427Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1001 ReqId# 2562560001874 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-02-19T08:15:04.913478Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-02-19T08:15:04.913513Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1001 ReqId# 2562560001604 PushRequestToScheduler Push to FastOperationsQueue.size# 2 2026-02-19T08:15:04.913594Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:395} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2026-02-19T08:15:04.913659Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000007 PDiskId# 1001 2026-02-19T08:15:04.913740Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:350} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2562560002116 PDiskId# 1001 2026-02-19T08:15:04.913860Z node 3 :BS_NODE DEBUG: 
{NW108@node_warden_impl.cpp:672} Handle(NPDisk::TEvChangeExpectedSlotCountResult) Msg# {EvChangeExpectedSlotCountResult Status#OK ErrorReason# ""} 2026-02-19T08:15:04.914048Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1001 ReqId# 2562560002116 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3612.577003 2026-02-19T08:15:04.914138Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-02-19T08:15:04.914170Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1001 ReqId# 2562560002116 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-02-19T08:15:04.914218Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:395} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2026-02-19T08:15:05.010527Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10010000008 PDiskId# 1001 2026-02-19T08:15:05.010631Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1001 ReqId# 2562560002360 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3612.673895 2026-02-19T08:15:05.011234Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-02-19T08:15:05.011291Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1001 ReqId# 2562560002360 PushRequestToScheduler Push to FastOperationsQueue.size# 1 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1001 Path: "SectorMap:TestInferPDiskSlotCountExplicitConfig:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 12 NumActiveSlots: 0 SlotSizeInUnits: 2 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 0 } - Test case 3 - update InferPDiskSlotCountSettings 2026-02-19T08:15:05.012001Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:171} Inferred PDisk slot count Path# SectorMap:TestInferPDiskSlotCountExplicitConfig:2400 SlotCount# 6 SlotSizeInUnits# 8 FromDriveSize# 2576980377600 FromUnitSize# 53687091200 FromMaxSlots# 9 2026-02-19T08:15:05.012070Z node 3 :BS_NODE DEBUG: {NW112@node_warden_impl.cpp:1300} SendChangeExpectedSlotCount from config notification PDiskId# 1001 ExpectedSlotCount# 6 SlotSizeInUnits# 8 2026-02-19T08:15:05.012237Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:208} CreateReqFromEv with sender ev# {EvChangeExpectedSlotCount ExpectedSlotCount# 6 SlotSizeInUnits# 8} Sender# 7 ReqId# 10010000009 PDiskId# 1001 2026-02-19T08:15:05.012294Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1001 ReqId# 2562560002642 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3612.675571 2026-02-19T08:15:05.012860Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# 
NKikimr::NPDisk::TChangeExpectedSlotCount OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-02-19T08:15:05.012912Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1001 ReqId# 2562560002642 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-02-19T08:15:05.013085Z node 3 :BS_NODE DEBUG: {NW108@node_warden_impl.cpp:672} Handle(NPDisk::TEvChangeExpectedSlotCountResult) Msg# {EvChangeExpectedSlotCountResult Status#OK ErrorReason# ""} 2026-02-19T08:15:05.108699Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10010000010 PDiskId# 1001 2026-02-19T08:15:05.108803Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1001 ReqId# 2562560002872 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3612.772060 2026-02-19T08:15:05.109061Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-02-19T08:15:05.109112Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1001 ReqId# 2562560002872 PushRequestToScheduler Push to FastOperationsQueue.size# 1 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1001 Path: "SectorMap:TestInferPDiskSlotCountExplicitConfig:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 6 NumActiveSlots: 0 SlotSizeInUnits: 8 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 6 SlotSizeInUnits: 8 PDiskUsage: 0 } - Test case 4 - remove InferPDiskSlotCountSettings 2026-02-19T08:15:05.109816Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:TestInferPDiskSlotCountExplicitConfig:2400 SlotCount# 13 SlotSizeInUnits# 0 2026-02-19T08:15:05.109901Z node 3 :BS_NODE DEBUG: {NW112@node_warden_impl.cpp:1300} SendChangeExpectedSlotCount from config notification PDiskId# 1001 ExpectedSlotCount# 13 SlotSizeInUnits# 0 2026-02-19T08:15:05.110055Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:208} CreateReqFromEv with sender ev# {EvChangeExpectedSlotCount ExpectedSlotCount# 13 SlotSizeInUnits# 0} Sender# 7 ReqId# 10010000011 PDiskId# 1001 2026-02-19T08:15:05.110111Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1001 ReqId# 2562560003154 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3612.773378 2026-02-19T08:15:05.110212Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TChangeExpectedSlotCount OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-02-19T08:15:05.110254Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1001 ReqId# 2562560003154 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-02-19T08:15:05.111114Z node 3 :BS_NODE DEBUG: {NW108@node_warden_impl.cpp:672} Handle(NPDisk::TEvChangeExpectedSlotCountResult) Msg# {EvChangeExpectedSlotCountResult Status#OK ErrorReason# ""} 2026-02-19T08:15:05.210401Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs 
Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10010000012 PDiskId# 1001 2026-02-19T08:15:05.210507Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4900: PDiskId# 1001 ReqId# 2562560003384 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 3612.873744 2026-02-19T08:15:05.210658Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3149} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-02-19T08:15:05.210699Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3602: PDiskId# 1001 ReqId# 2562560003384 PushRequestToScheduler Push to FastOperationsQueue.size# 1 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1001 Path: "SectorMap:TestInferPDiskSlotCountExplicitConfig:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 13 NumActiveSlots: 0 SlotSizeInUnits: 0 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 0 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ProposeResultNotViaPipe [GOOD] Test command err: 2026-02-19T08:15:04.702512Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:15:04.738590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:15:04.738924Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:15:04.747578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:15:04.747867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:15:04.748170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:15:04.748305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:15:04.748424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:15:04.748540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:15:04.748650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
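Editor's note: the `TestInferPDiskSlotCountExplicitConfig` output above logs the inferred values together with their inputs: `SlotCount# 12 SlotSizeInUnits# 2` from `FromDriveSize# 2576980377600 FromUnitSize# 107374182400 FromMaxSlots# 16`, and `SlotCount# 6 SlotSizeInUnits# 8` from a 50 GiB unit with `FromMaxSlots# 9`; with inference not configured, the explicit `SlotCount: 13`, `SlotSizeInUnits: 0` is kept. Both inferred cases are consistent with "divide the drive into whole units, then grow the per-slot unit count to the next power of two until the slot count fits under the max", but that rule is only reverse-engineered from these two log lines, not taken from the node warden source. A sketch of that assumed rule, which reproduces the logged numbers:

    def infer_slots(drive_size: int, unit_size: int, max_slots: int):
        """Assumed reconstruction of the slot-count inference seen in the
        log above; the real node warden logic may differ."""
        units = drive_size // unit_size          # whole units on the drive
        slot_units = 1
        while units // slot_units > max_slots:   # grow slot size in powers of two
            slot_units *= 2
        return units // slot_units, slot_units   # (SlotCount, SlotSizeInUnits)

    # Values taken from the TestInferPDiskSlotCountExplicitConfig log above.
    assert infer_slots(2576980377600, 107374182400, 16) == (12, 2)
    assert infer_slots(2576980377600, 53687091200, 9) == (6, 8)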
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:15:04.748802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:15:04.748927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:15:04.749040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:15:04.749167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:15:04.749288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:15:04.749428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:15:04.780526Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:15:04.780757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:15:04.780840Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:15:04.781057Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:04.781223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:15:04.781307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:15:04.781357Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:15:04.781465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:15:04.781549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:15:04.781600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:15:04.781633Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:15:04.781873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:04.781949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:15:04.781994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:15:04.782029Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:15:04.782132Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:15:04.782196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:15:04.782253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:15:04.782306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:15:04.782363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:15:04.782415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:15:04.782465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:15:04.782522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:15:04.782573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:15:04.782630Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:15:04.782837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:15:04.782886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:15:04.782920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:15:04.783074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:15:04.783138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:15:04.783177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:15:04.783237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:15:04.783280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:15:04.783316Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:15:04.783380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:15:04.783425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:15:04.783458Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:15:04.783598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:15:04.783651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ma::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-02-19T08:15:04.784022Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:151;step=TTxUpdateSchema.Execute_Finish; 2026-02-19T08:15:05.043193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Complete;fline=columnshard__init.cpp:158;step=TTxUpdateSchema.Complete; 2026-02-19T08:15:05.043351Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxInit.Execute at tablet 9437184 2026-02-19T08:15:05.046609Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=18; 2026-02-19T08:15:05.046857Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:special_valuesLoadingTime=133; 2026-02-19T08:15:05.047274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:special_valuesLoadingTime=335; 2026-02-19T08:15:05.047439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tables_managerLoadingTime=109; 2026-02-19T08:15:05.047667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TablesLoadingTime=37; 2026-02-19T08:15:05.047744Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TablesLoadingTime=32; 2026-02-19T08:15:05.047818Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2026-02-19T08:15:05.047897Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2026-02-19T08:15:05.047964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2026-02-19T08:15:05.048048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tables_managerLoadingTime=573; 2026-02-19T08:15:05.048138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=49; 2026-02-19T08:15:05.048231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=42; 2026-02-19T08:15:05.048308Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=41; 2026-02-19T08:15:05.048364Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=30; 2026-02-19T08:15:05.048481Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=90; 2026-02-19T08:15:05.048773Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=259; 2026-02-19T08:15:05.048806Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2026-02-19T08:15:05.048839Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=4; 2026-02-19T08:15:05.048876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2026-02-19T08:15:05.048932Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2026-02-19T08:15:05.048958Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-02-19T08:15:05.049031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2026-02-19T08:15:05.049091Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=17; 2026-02-19T08:15:05.049186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=45; 2026-02-19T08:15:05.049275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=40; 2026-02-19T08:15:05.049338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=24; 2026-02-19T08:15:05.049380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2670; 2026-02-19T08:15:05.049515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:15:05.049566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:15:05.049659Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:15:05.050084Z 
node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:15:05.050161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:465;problem=Background activities cannot be started: no index at tablet; 2026-02-19T08:15:05.050255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.005000s; 2026-02-19T08:15:05.050664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:15:05.050793Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:15:05.050854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:15:05.050904Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:465;problem=Background activities cannot be started: no index at tablet; 2026-02-19T08:15:05.051030Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.026000s; 2026-02-19T08:15:05.051073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; CreateInitShard: { InitShard { OwnerPathId: 1 Tables { PathId: 1 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" TypeId: 50 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } TtlSettings { Version: 1 } } OwnerPath: "/Root/olapStore" } } 2026-02-19T08:15:05.140693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=100;this=137120011966784;method=TTxController::StartProposeOnExecute;tx_info=100:TX_KIND_SCHEMA;min=1771488904763;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:15:05.140891Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=100;this=137120011966784;method=TTxController::StartProposeOnExecute;tx_info=100:TX_KIND_SCHEMA;min=1771488904763;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-02-19T08:15:05.155538Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=100:TX_KIND_SCHEMA;min=1771488904763;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;this=137120011966784;op_tx=100:TX_KIND_SCHEMA;min=1771488904763;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_op_tx=100:TX_KIND_SCHEMA;min=1771488904763;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_this=137326173148032;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-02-19T08:15:05.155678Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=100:TX_KIND_SCHEMA;min=1771488904763;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;this=137120011966784;op_tx=100:TX_KIND_SCHEMA;min=1771488904763;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_op_tx=100:TX_KIND_SCHEMA;min=1771488904763;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:0;;int_this=137326173148032;method=TTxController::FinishProposeOnComplete;tx_id=100;fline=propose_tx.cpp:25;message=;tablet_id=9437184;tx_id=100; >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2026-02-19T08:14:59.740259Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.742828Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.748161Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.748279Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.749915Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] 
oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.754202Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.754359Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.754474Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.758087Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.760881Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.761274Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.761341Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:5:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:5:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.764178Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:4:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.764601Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.764678Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.765173Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:6:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:6:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.768959Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:4:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:4:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.771654Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# 
[2000000:1:0:4:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.771806Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.775861Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:6:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.776184Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.776282Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.780096Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.781065Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:5:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.784824Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-02-19T08:14:59.787048Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:7:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/ynks/0051aa/r3tmp/tmpSOjpvF/pdisk_1.dat 2026-02-19T08:15:00.706626Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [695c02aa31dd4562] bootstrap ActorId# [1:1078:3024] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1365:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-02-19T08:15:00.706847Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.706900Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.706930Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.706960Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 
2026-02-19T08:15:00.706988Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707016Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707051Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707079Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707110Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707136Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707164Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707189Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707234Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707271Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707297Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707326Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707353Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707379Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [695c02aa31dd4562] Id# [72057594037932033:2:8:0:0:1365:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:00.707423Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [695c02aa31dd4562] restore Id# [72057594037932033:2:8:0:0:1365:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2026-02-19T08:15:00.707515Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [695c02aa31dd4562] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1365:1] Marker# BPG33 2026-02-19T08:15:00.707564Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [695c02aa31dd4562] Sending 
missing VPut part# 0 to# 0 blob Id# ... ns Marker# DSP03 2026-02-19T08:15:05.746654Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.746835Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.746990Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.747156Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.747235Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.747301Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.747369Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.747410Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-02-19T08:15:05.747453Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-02-19T08:15:05.747503Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:340: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2026-02-19T08:15:05.748256Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [fe7703e0781c10d0] bootstrap 
ActorId# [3:1232:3075] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2026-02-19T08:15:05.748384Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [fe7703e0781c10d0] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:15:05.748428Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [fe7703e0781c10d0] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-02-19T08:15:05.748482Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [fe7703e0781c10d0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2026-02-19T08:15:05.748515Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [fe7703e0781c10d0] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2026-02-19T08:15:05.748623Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:1225:3068] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-02-19T08:15:05.752887Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [fe7703e0781c10d0] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-02-19T08:15:05.753018Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [fe7703e0781c10d0] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2026-02-19T08:15:05.753067Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [fe7703e0781c10d0] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-02-19T08:15:05.753547Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:15:05.753586Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2026-02-19T08:15:05.753864Z node 4 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2026-02-19T08:15:05.754234Z node 4 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/ynks/0051aa/r3tmp/tmpzAJxOM//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2026-02-19T08:15:05.755093Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2026-02-19T08:15:05.755132Z node 4 :BS_PROXY NOTICE: 
dsproxy_state.cpp:344: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:15:05.756862Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:1234:2133] Create Queue# [4:1236:2134] targetNodeId# 3 Marker# DSP01 2026-02-19T08:15:05.757008Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:1234:2133] Create Queue# [4:1237:2135] targetNodeId# 3 Marker# DSP01 2026-02-19T08:15:05.757126Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:1234:2133] Create Queue# [4:1238:2136] targetNodeId# 3 Marker# DSP01 2026-02-19T08:15:05.757241Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:1234:2133] Create Queue# [4:1239:2137] targetNodeId# 3 Marker# DSP01 2026-02-19T08:15:05.757358Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:1234:2133] Create Queue# [4:1240:2138] targetNodeId# 3 Marker# DSP01 2026-02-19T08:15:05.757467Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:1234:2133] Create Queue# [4:1241:2139] targetNodeId# 3 Marker# DSP01 2026-02-19T08:15:05.757561Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:1234:2133] Create Queue# [4:1242:2140] targetNodeId# 3 Marker# DSP01 2026-02-19T08:15:05.757586Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:15:05.759049Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.759276Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.759330Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.759464Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.759517Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 
0.000000s Marker# DSP04 2026-02-19T08:15:05.759594Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.759636Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-02-19T08:15:05.759658Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-02-19T08:15:05.759683Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-02-19T08:15:05.759836Z node 4 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [4:1236:2134] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] Test command err: RandomSeed# 13529138678366097667 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 ], ver1 disks [ 2 ] ERROR: There are unequal parts *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: 
[82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ], ver1 disks [ 3 4 5 ] ERROR: There are unequal parts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] Test command err: RandomSeed# 1151111277127233832 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 0 ] part 2: ver0 disks [ 7 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB 
[72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 |90.9%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 4337271987694797956 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2026-02-19T08:15:03.485700Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:6355:837] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |90.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] Test command err: RandomSeed# 10123396953944588049 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 7 ], ver2 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> 
OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB 
[72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 0 ] part 2: ver0 disks [ 6 ], ver1 disks [ 1 ] part 3: ver0 disks [ 6 ], ver1 disks [ 2 ] part 4: ver0 disks [ 3 ], ver1 disks [ 6 ] part 5: ver0 disks [ 4 ], ver1 disks [ 6 ] part 6: ver0 disks [ 5 ], ver1 disks [ 6 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 
2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> 
OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails >> TResourceBrokerInstant::TestMerge >> TResourceBroker::TestQueueWithConfigure >> TResourceBrokerInstant::Test >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> TResourceBrokerInstant::Test [GOOD] >> TResourceBrokerInstant::TestErrors >> TResourceBrokerInstant::TestMerge [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TColumnShardTestSchema::RebootExportAfterFail >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TResourceBrokerInstant::TestErrors [GOOD] |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |90.9%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 >> TResourceBroker::TestOverusageDifferentResources [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-02-19T08:15:04.637672Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:15:04.675456Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:15:04.675768Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:15:04.684863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:15:04.685118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:15:04.685385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:15:04.685528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:15:04.685660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:15:04.685815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:15:04.685930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:15:04.686056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:15:04.686186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:15:04.686321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:15:04.686465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:15:04.686614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:15:04.686774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:15:04.719354Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:15:04.719596Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:15:04.719689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:15:04.719910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:04.720081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:15:04.720168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:15:04.720219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:15:04.720336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:15:04.720413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:15:04.720466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:15:04.720498Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:15:04.720710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:04.720778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:15:04.720884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-02-19T08:15:04.720917Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:15:04.721020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:15:04.721081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:15:04.721155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:15:04.721205Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:15:04.721264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:15:04.721311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:15:04.721366Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:15:04.721418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:15:04.721465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:15:04.721509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:15:04.721749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:15:04.721807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:15:04.721861Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:15:04.722018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:15:04.722073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:15:04.722106Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:15:04.722159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:15:04.722212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:15:04.722264Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:15:04.722339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:15:04.722389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:15:04.722423Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:15:04.722578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:15:04.722641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
OLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2026-02-19T08:15:08.989894Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-02-19T08:15:09.002948Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2026-02-19T08:15:09.003508Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1771488905723:max} readable: {1771488905723:max} at tablet 9437184 2026-02-19T08:15:09.003667Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-02-19T08:15:09.008103Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488905723:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:15:09.008236Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488905723:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:15:09.008948Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488905723:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-02-19T08:15:09.011470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488905723:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-02-19T08:15:09.090010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488905723:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:217;event=TTxScan started;actor_id=[1:561:2573];trace_detailed=; 2026-02-19T08:15:09.091345Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-02-19T08:15:09.091704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-02-19T08:15:09.092147Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:15:09.092309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:15:09.092635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:15:09.092821Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:15:09.093052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:15:09.093343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:561:2573] finished for tablet 
9437184 2026-02-19T08:15:09.093878Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:555:2567];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":5291034,"name":"_full_task","f":5291034,"d_finished":0,"c":0,"l":5294542,"d":3508},"events":[{"name":"bootstrap","f":5291385,"d_finished":2079,"c":1,"l":5293464,"d":2079},{"a":5293730,"name":"ack","f":5293730,"d_finished":0,"c":0,"l":5294542,"d":812},{"a":5293699,"name":"processing","f":5293699,"d_finished":0,"c":0,"l":5294542,"d":843},{"name":"ProduceResults","f":5293065,"d_finished":807,"c":2,"l":5294206,"d":807},{"a":5294212,"name":"Finish","f":5294212,"d_finished":0,"c":0,"l":5294542,"d":330}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:15:09.093985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:555:2567];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:15:09.094489Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:555:2567];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.002},{"events":["l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":5291034,"name":"_full_task","f":5291034,"d_finished":0,"c":0,"l":5295156,"d":4122},"events":[{"name":"bootstrap","f":5291385,"d_finished":2079,"c":1,"l":5293464,"d":2079},{"a":5293730,"name":"ack","f":5293730,"d_finished":0,"c":0,"l":5295156,"d":1426},{"a":5293699,"name":"processing","f":5293699,"d_finished":0,"c":0,"l":5295156,"d":1457},{"name":"ProduceResults","f":5293065,"d_finished":807,"c":2,"l":5294206,"d":807},{"a":5294212,"name":"Finish","f":5294212,"d_finished":0,"c":0,"l":5295156,"d":944}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:15:09.094622Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:15:09.011423Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-02-19T08:15:09.094671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:15:09.094804Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |90.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.9%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> TTxAllocatorClientTest::AllocateOverTheEdge >> TTxAllocatorClientTest::ZeroRange |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> TestMalformedRequest::CompressedGzipContentLengthNone >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] Test command err: 2026-02-19T08:15:10.075857Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:1080: FinishTaskInstant failed for task 2: cannot finish unknown task ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2026-02-19T08:15:09.585552Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2026-02-19T08:15:09.585827Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' is required" 2026-02-19T08:15:09.586066Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |90.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test >> Channels20::CaIntegrationIc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2026-02-19T08:15:11.244347Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-02-19T08:15:11.255391Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-02-19T08:15:11.263076Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-02-19T08:15:11.299473Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.318774Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-02-19T08:15:11.354694Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.354931Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.354985Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.355113Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-02-19T08:15:11.355228Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.355331Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-02-19T08:15:11.355987Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-02-19T08:15:11.358487Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-02-19T08:15:11.366663Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.366756Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.366862Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-02-19T08:15:11.366929Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 2026-02-19T08:15:11.367156Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-02-19T08:15:11.367351Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-02-19T08:15:11.367538Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-02-19T08:15:11.367697Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-02-19T08:15:11.367838Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-02-19T08:15:11.369433Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.369567Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.369674Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2026-02-19T08:15:11.369736Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 5000 to# 10000 2026-02-19T08:15:11.369943Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-02-19T08:15:11.370136Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-02-19T08:15:11.370322Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-02-19T08:15:11.370562Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-02-19T08:15:11.370719Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-02-19T08:15:11.371242Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.371311Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.371410Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2026-02-19T08:15:11.371445Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 10000 to# 15000 2026-02-19T08:15:11.371626Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 >> TTxAllocatorClientTest::InitiatingRequest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink-UseStreamIndex [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink+UseStreamIndex [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink+UseStreamIndex >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2026-02-19T08:15:12.462634Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-02-19T08:15:12.463190Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-02-19T08:15:12.463971Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-02-19T08:15:12.465552Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:12.466090Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-02-19T08:15:12.476672Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:12.476915Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:12.476976Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:12.477070Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-02-19T08:15:12.477194Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:12.477309Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-02-19T08:15:12.477444Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-02-19T08:15:12.478401Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-02-19T08:15:12.478960Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:12.479032Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:12.479146Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-02-19T08:15:12.479202Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> Channels20::CaIntegrationIc [GOOD] Test command err: 2026-02-19T08:13:46.898850Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486573376418928:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:46.901502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:46.973495Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486574241701338:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:46.992488Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:47.242059Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:47.245815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:47.286273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:47.286552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:47.288991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:47.289086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:47.299058Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:13:47.299245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:47.326149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:47.410729Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:47.501249Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:47.502613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:47.512529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:47.512554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:47.512570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:47.512703Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:47.910057Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:47.997848Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:48.072830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:48.192868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:48.457118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:48.689415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:48.822106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.078534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486594851257770:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.078642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.078967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486594851257780:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.079039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.419053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.482668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.566744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.625201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.691430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.761344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.876191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.899789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486573376418928:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:51.899838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:51.932007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.967878Z node 2 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608486574241701338:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:51.969781Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:52.049217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486599146226257:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.049293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486599146226262:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.049296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.049478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:760848 ... 53.230831Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:53.497879Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7608486843079759023:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:53.497979Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:58.767590Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608486883895551595:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:58.767747Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:58.768168Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608486883895551606:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:58.768227Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:58.869970Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:59.048762Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:59.214289Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:59.359078Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:59.494707Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:59.703205Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:00.032691Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:00.263936Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:00.616522Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608486892485487231:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:00.616653Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:00.616764Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608486892485487236:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:00.621956Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608486892485487238:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:00.622056Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:00.624478Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:00.684578Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7608486892485487239:2424], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730670 completed, doublechecking } 2026-02-19T08:15:00.753327Z node 11 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [11:7608486892485487313:4627] txid# 281474976730671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:03.845876Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:15:03.845928Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:04.269960Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976730673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:15:06.037290Z node 11 :KQP_CHANNELS WARN: dq_channel_service.cpp:1602: NODE RECONCILIATION x2, to NodeId=13, NodeActorId=[11:7608486913960324539:2579], Gen=1.1, Next Delay=0.200000s 2026-02-19T08:15:06.037383Z node 11 :KQP_CHANNELS WARN: dq_channel_service.cpp:1602: NODE RECONCILIATION x2, to NodeId=12, NodeActorId=[11:7608486913960324540:2580], Gen=1.1, Next Delay=0.200000s 2026-02-19T08:15:06.037422Z node 11 :KQP_CHANNELS WARN: dq_channel_service.cpp:1602: NODE RECONCILIATION x2, to NodeId=14, NodeActorId=[11:7608486913960324541:2581], Gen=1.1, Next Delay=0.200000s 2026-02-19T08:15:06.076026Z node 12 :KQP_CHANNELS WARN: dq_channel_service.cpp:1602: NODE RECONCILIATION x2, to NodeId=11, NodeActorId=[12:7608486914044430392:2439], Gen=1.1, Next Delay=0.200000s 2026-02-19T08:15:06.185153Z node 14 :KQP_CHANNELS WARN: dq_channel_service.cpp:1602: NODE RECONCILIATION x2, to NodeId=11, NodeActorId=[14:7608486912570846403:2439], Gen=1.1, Next Delay=0.200000s 2026-02-19T08:15:06.238971Z node 11 :KQP_CHANNELS WARN: dq_channel_service.cpp:1602: NODE RECONCILIATION x3, to NodeId=13, NodeActorId=[11:7608486913960324539:2579], Gen=1.1, Next Delay=0.400000s 2026-02-19T08:15:06.264453Z node 13 :KQP_CHANNELS WARN: dq_channel_service.cpp:1602: NODE RECONCILIATION x2, to NodeId=11, NodeActorId=[13:7608486916094204054:2451], Gen=1.1, Next Delay=0.200000s 2026-02-19T08:15:07.231752Z node 12 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [495667a0a4dcb2a4] Result# TEvPutResult {Id# [72075186224037903:1:8:0:0:98:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037903:1:8:0:0:98:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 11 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2026-02-19T08:15:07.232052Z node 12 :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.435 QueryCount# 3 VDiskId# [82000000:1:0:0:0] NodeId# 11 Subrequests# [{ BlobId# [72075186224037903:1:8:0:0:98:1] }, { BlobId# [72075186224037903:1:8:1:24576:90:1] }, { BlobId# [72075186224037903:1:8:1:32768:37:1] }] } TEvVPutResult{ TimestampMs# 0.939 VDiskId# [82000000:1:0:0:0] NodeId# 11 Status# ERROR ErrorReason# "BS_QUEUE: event 
undelivered" SubrequestResults# [{ BlobId# [72075186224037903:1:8:0:0:98:1] Status# ERROR }, { BlobId# [72075186224037903:1:8:1:24576:90:1] Status# ERROR }, { BlobId# [72075186224037903:1:8:1:32768:37:1] Status# ERROR }] } ] } 2026-02-19T08:15:07.239543Z node 14 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [3c0696a66a5c163a] Result# TEvPutResult {Id# [72075186224037906:1:8:0:0:98:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037906:1:8:0:0:98:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 11 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2026-02-19T08:15:07.336122Z node 13 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [4dacb8dbebabea61] Result# TEvPutResult {Id# [72075186224037910:1:8:0:0:98:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037910:1:8:0:0:98:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 11 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpRm::NotEnoughMemory >> Cdc::ShouldBreakLocksOnConcurrentAlterStream [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> KqpRm::NotEnoughMemory [GOOD] >> TableWriter::Backup [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndex |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2026-02-19T08:15:16.092978Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:15:16.093556Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/000a26/r3tmp/tmpmuhTBw/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2026-02-19T08:15:16.094227Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/000a26/r3tmp/tmpmuhTBw/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/000a26/r3tmp/tmpmuhTBw/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18025609152525732438 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:15:16.148525Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:16.148859Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:16.164122Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-02-19T08:15:16.164259Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-02-19T08:15:16.164311Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-02-19T08:15:16.164365Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-02-19T08:15:16.164561Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:16.164613Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-02-19T08:15:16.164660Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:16.164682Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-02-19T08:15:16.164778Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:16.188701Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488916 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:16.189052Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:16.189141Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488916 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:16.189521Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:16.190219Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:16.190514Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:16.190560Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:16.190680Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488916 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:16.190981Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:16.191013Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:16.191097Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488916 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:16.191765Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-02-19T08:15:16.191975Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:16.192439Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:16.192567Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:16.192721Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:16.193002Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:16.193087Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:16.193299Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:15:16.193568Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:16.193655Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 >> YdbIndexTable::OnlineBuild |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> YdbIndexTable::MultiShardTableOneIndex |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> KqpScheme::ResourcePoolClassifiersRankValidation [GOOD] >> KqpScheme::ParallelDropStreamingQuery >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TestMalformedRequest::CompressedGzipContentLengthLower |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |90.9%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards >> TableWriter::Restore >> TableWriter::Restore [GOOD] >> KqpRm::NotEnoughExecutionUnits >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> 
TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> KqpRm::ManyTasks >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] |90.9%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRm::NotEnoughExecutionUnits [GOOD] |90.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> KqpRm::ManyTasks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2026-02-19T08:15:21.816691Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:15:21.817113Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/000a25/r3tmp/tmpbflke4/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:15:21.817725Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/000a25/r3tmp/tmpbflke4/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/000a25/r3tmp/tmpbflke4/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9400911700258648126 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:15:21.862099Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:21.862451Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:21.880480Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-02-19T08:15:21.880623Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-02-19T08:15:21.880669Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-02-19T08:15:21.880723Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-02-19T08:15:21.880893Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:21.880958Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-02-19T08:15:21.881000Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:21.881020Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-02-19T08:15:21.881151Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:21.900916Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488921 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:21.901197Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:21.901278Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488921 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:21.901580Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:21.901755Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:21.901927Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:21.901966Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:21.902081Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488921 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:21.902331Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:21.902409Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:21.902477Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488921 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:21.903050Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-02-19T08:15:21.903215Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:21.903769Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:21.903891Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:21.904033Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:21.904241Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:21.904343Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:21.904582Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:15:21.904749Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:21.904845Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink+UseStreamIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2026-02-19T08:15:22.265174Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:15:22.265780Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/000a24/r3tmp/tmpk79ceZ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:15:22.266460Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/000a24/r3tmp/tmpk79ceZ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/000a24/r3tmp/tmpk79ceZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1064303912629215139 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:15:22.347312Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:22.347684Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:22.382817Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-02-19T08:15:22.382988Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-02-19T08:15:22.383040Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-02-19T08:15:22.383096Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-02-19T08:15:22.383275Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:22.383325Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-02-19T08:15:22.383368Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:22.383422Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-02-19T08:15:22.383527Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:22.419357Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488922 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:22.419688Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:22.419784Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488922 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:22.420149Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:22.420309Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:22.420477Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:22.420515Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:22.420623Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488922 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:22.420867Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:22.420896Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:22.420967Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488922 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:22.421630Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-02-19T08:15:22.421855Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:22.422417Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:22.422545Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:22.422667Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:22.423063Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:22.423153Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:22.423375Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:15:22.423572Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:22.423651Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:15:22.426606Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.426682Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.426739Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.426782Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.426826Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:468:2348])) 2026-02-19T08:15:22.427059Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.427332Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.427381Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.427422Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.427455Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.427477Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:468:2348])) 2026-02-19T08:15:22.427508Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.427629Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-3-3 (3 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.427654Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-3-3 (3 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.427674Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.427690Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-3-3 (3 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.427708Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:468:2348])) 2026-02-19T08:15:22.427725Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 3. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.427797Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-4-4 (4 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.427818Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-4-4 (4 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.427836Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.427852Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-4-4 (4 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.427874Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:468:2348])) 2026-02-19T08:15:22.427902Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.427978Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-5-5 (5 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.428004Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-5-5 (5 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428033Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.428055Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-5-5 (5 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428077Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:468:2348])) 2026-02-19T08:15:22.428106Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 5. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.428194Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-6-6 (6 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.428214Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-6-6 (6 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428230Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.428246Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-6-6 (6 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428261Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:468:2348])) 2026-02-19T08:15:22.428276Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 6. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.428344Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-7-7 (7 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.428365Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-7-7 (7 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428387Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.428410Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-7-7 (7 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428441Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:468:2348])) 2026-02-19T08:15:22.428464Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.428539Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-8-8 (8 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.428571Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-8-8 (8 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428595Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.428618Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-8-8 (8 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428633Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:468:2348])) 2026-02-19T08:15:22.428656Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 8. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.428715Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-9-9 (9 by [1:468:2348]) priority=0 resources={0, 100} 2026-02-19T08:15:22.428728Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-9-9 (9 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428749Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-02-19T08:15:22.428773Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-9-9 (9 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-02-19T08:15:22.428788Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:468:2348])) 2026-02-19T08:15:22.428804Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-02-19T08:15:22.428869Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:468:2348]) (release resources {0, 100}) 2026-02-19T08:15:22.428926Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:468:2348])) 2026-02-19T08:15:22.428967Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-02-19T08:14:53.254665Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486864518593074:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:53.272581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:53.701290Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:53.764818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:53.764937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:53.783555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:53.946035Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:54.004420Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:54.289854Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:54.374442Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486864518593282:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:54.374506Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486868813561045:2442] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:54.374695Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608486864518593289:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:14:54.374780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608486864518593517:2290][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608486864518593289:2146], cookie# 1 2026-02-19T08:14:54.376646Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608486864518593573:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486864518593570:2290], cookie# 1 2026-02-19T08:14:54.376681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608486864518593574:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486864518593571:2290], cookie# 1 2026-02-19T08:14:54.376695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608486864518593575:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486864518593572:2290], cookie# 1 2026-02-19T08:14:54.376745Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608486860223625629:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486864518593573:2290], cookie# 1 2026-02-19T08:14:54.376774Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608486860223625632:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486864518593574:2290], cookie# 1 2026-02-19T08:14:54.376809Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608486860223625635:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608486864518593575:2290], cookie# 1 2026-02-19T08:14:54.376975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608486864518593573:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486860223625629:2050], cookie# 1 2026-02-19T08:14:54.376994Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608486864518593574:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486860223625632:2053], cookie# 1 2026-02-19T08:14:54.377009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608486864518593575:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486860223625635:2056], cookie# 1 2026-02-19T08:14:54.377046Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: 
[main][1:7608486864518593517:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486864518593570:2290], cookie# 1 2026-02-19T08:14:54.377070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608486864518593517:2290][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:14:54.377086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608486864518593517:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486864518593571:2290], cookie# 1 2026-02-19T08:14:54.377112Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608486864518593517:2290][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:14:54.377137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608486864518593517:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608486864518593572:2290], cookie# 1 2026-02-19T08:14:54.377192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608486864518593517:2290][/dc-1] Sync cookie mismatch: sender# [1:7608486864518593572:2290], cookie# 1, current cookie# 0 2026-02-19T08:14:54.377257Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608486864518593289:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:14:54.388995Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608486864518593289:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608486864518593517:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:14:54.389166Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608486864518593289:2146], cacheItem# { Subscriber: { Subscriber: [1:7608486864518593517:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:14:54.410541Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608486868813561046:2443], recipient# [1:7608486868813561045:2442], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 
72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:14:54.410665Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486868813561045:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:54.545981Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486868813561045:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:14:54.549543Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486868813561045:2442] Handle TEvDescribeSchemeResult Forward to# [1:7608486868813561044:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:54.569958Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486864518593282:2143] Handle TEvProposeTransaction 2026-02-19T08:14:54.569984Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486864518593282:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:14:54.570046Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486864518593282:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608486868813561052:2448] 2026-02-19T08:14:54.795116Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486868813561052:2448] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-numbe ... 
esNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:15:18.245467Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608486972247342124:4038], recipient# [3:7608486972247342123:2357], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:18.985068Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608486920707732085:2154], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:18.985240Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608486920707732085:2154], cacheItem# { Subscriber: { Subscriber: [3:7608486925002700042:2605] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:15:18.985405Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608486972247342144:4042], recipient# [3:7608486972247342143:2358], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:18.993420Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608486920707732085:2154], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:18.993560Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608486920707732085:2154], cacheItem# { Subscriber: { Subscriber: [3:7608486925002700042:2605] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:15:18.993720Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608486972247342146:4043], recipient# [3:7608486972247342145:2359], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:19.246710Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608486920707732085:2154], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:19.246864Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608486920707732085:2154], cacheItem# { Subscriber: { Subscriber: [3:7608486942182569952:3177] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:15:19.246967Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608486976542309444:4044], recipient# [3:7608486976542309443:2360], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:19.986079Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608486920707732085:2154], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:19.986211Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608486920707732085:2154], cacheItem# { Subscriber: { Subscriber: [3:7608486925002700042:2605] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:15:19.986300Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608486976542309464:4048], recipient# [3:7608486976542309463:2361], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:19.994141Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608486920707732085:2154], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:19.994257Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608486920707732085:2154], cacheItem# { Subscriber: { Subscriber: [3:7608486925002700042:2605] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:15:19.994347Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608486976542309466:4049], recipient# [3:7608486976542309465:2362], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:20.248659Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608486920707732085:2154], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:15:20.248809Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608486920707732085:2154], cacheItem# { Subscriber: { Subscriber: [3:7608486942182569952:3177] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:15:20.248934Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608486980837276764:4050], recipient# [3:7608486980837276763:2363], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 >> THiveTest::TestLockTabletExecutionReconnectExpire >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink+UseStreamIndex [GOOD] Test command err: 2026-02-19T08:14:52.667248Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486858110328783:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:52.667297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:53.308461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:14:53.342001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:53.342131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:53.442669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:53.449982Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:53.457881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486858110328554:2081] 1771488892632000 != 1771488892632003 2026-02-19T08:14:53.565813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:14:53.597991Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:53.858162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:53.858204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:53.858212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:53.858309Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:54.568824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:54.575578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:14:54.636927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:54.912222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:55.228735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:55.378134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:57.673369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486858110328783:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:57.673493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:14:58.041886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486883880134224:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:58.042010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:58.042793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486883880134234:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:58.042845Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:58.615124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:58.664556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:58.751354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:58.801045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:58.839334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:58.895005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:58.943028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:59.024023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:14:59.145495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486888175102410:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:59.145590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:59.145971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486888175102415:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:59.146022Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486888175102416:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:59.146135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:14:59.152128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:14:59.186291Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486888175102419:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:14:59.272543Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486888175102470:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:01.366414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608486896765037389:3983], Recipient [1:7608486858110328914:2145]: NKikimr::TE ... 2026-02-19T08:15:22.003259Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.003296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710673 2026-02-19T08:15:22.003309Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.003340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710673 2026-02-19T08:15:22.003345Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.003370Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710673 2026-02-19T08:15:22.003376Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.003402Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710673 2026-02-19T08:15:22.003407Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.003648Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710673:0, at schemeshard: 72057594046644480 2026-02-19T08:15:22.003656Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.003668Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976710673:0 2026-02-19T08:15:22.003723Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [3:7608486982709126047:2543] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710673 at schemeshard: 72057594046644480 2026-02-19T08:15:22.003786Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710673:2, at schemeshard: 72057594046644480 2026-02-19T08:15:22.003793Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.003800Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:285: Activate send for 281474976710673:2 2026-02-19T08:15:22.003829Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [3:7608486982709126046:2542] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710673 at schemeshard: 72057594046644480 2026-02-19T08:15:22.003893Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [3:7608486948349384886:2156], Recipient [3:7608486948349384886:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:15:22.003913Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:15:22.003950Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710673:0, at schemeshard: 72057594046644480 2026-02-19T08:15:22.003975Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046644480] TDone opId# 281474976710673:0 ProgressState 2026-02-19T08:15:22.004047Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:15:22.004062Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710673:0 progress is 2/3 2026-02-19T08:15:22.004073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710673 ready parts: 2/3 2026-02-19T08:15:22.004089Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710673:0 progress is 2/3 2026-02-19T08:15:22.004099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710673 ready parts: 2/3 2026-02-19T08:15:22.004113Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710673, ready parts: 2/3, is published: true 2026-02-19T08:15:22.004286Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [3:7608486948349384886:2156], Recipient [3:7608486948349384886:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:15:22.004309Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:15:22.004331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710673:2, at schemeshard: 72057594046644480 2026-02-19T08:15:22.004345Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046644480] TDone opId# 281474976710673:2 ProgressState 2026-02-19T08:15:22.004401Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:15:22.004414Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710673:2 progress is 3/3 2026-02-19T08:15:22.004422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710673 ready parts: 3/3 2026-02-19T08:15:22.004437Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710673:2 progress is 
3/3 2026-02-19T08:15:22.004444Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710673 ready parts: 3/3 2026-02-19T08:15:22.004453Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710673, ready parts: 3/3, is published: true 2026-02-19T08:15:22.004494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:7608486982709126017:2540] message: TxId: 281474976710673 2026-02-19T08:15:22.004511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710673 ready parts: 3/3 2026-02-19T08:15:22.004535Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710673:0 2026-02-19T08:15:22.004545Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710673:0 2026-02-19T08:15:22.004645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 54] was 4 2026-02-19T08:15:22.004660Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710673:1 2026-02-19T08:15:22.004667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710673:1 2026-02-19T08:15:22.004682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 55] was 3 2026-02-19T08:15:22.004689Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710673:2 2026-02-19T08:15:22.004695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710673:2 2026-02-19T08:15:22.004726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 56] was 3 2026-02-19T08:15:22.005257Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.005317Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:15:22.005362Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [3:7608486982709126017:2540] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710673 at schemeshard: 72057594046644480 2026-02-19T08:15:22.005522Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [3:7608486982709126115:4038], Recipient [3:7608486948349384886:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:22.005546Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:22.005558Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:15:22.006098Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 
269877764, Sender [3:7608486982709126026:3975], Recipient [3:7608486948349384886:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:22.006115Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:22.006124Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:15:22.007136Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [3:7608486982709126114:4037], Recipient [3:7608486948349384886:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:22.007157Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:22.007167Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:15:22.418836Z node 3 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:15:22.649783Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:7608486948349384886:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:15:22.649828Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:15:22.649876Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [3:7608486948349384886:2156], Recipient [3:7608486948349384886:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:15:22.649894Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 >> KqpRm::NodesMembershipByExchanger >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> TestMalformedRequest::CompressedGzipContentLengthLower [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/types/ydb-core-kqp-ut-olap-types |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/types/ydb-core-kqp-ut-olap-types |91.0%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/types/ydb-core-kqp-ut-olap-types >> TestMalformedRequest::CompressedGzipContentLengthHigher >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage >> KqpRm::NodesMembershipByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2026-02-19T08:15:10.255785Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486937152263351:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:10.256084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:10.303770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:10.320435Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:10.367940Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:10.857794Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:10.865861Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:10.866004Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:11.004578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:11.004699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:11.019449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:11.019519Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:11.158353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:11.185776Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:11.276788Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:11.286476Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:11.301194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:11.356648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:11.365261Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:11.384015Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:11.874547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0007b0/r3tmp/yandexVLZUtx.tmp 2026-02-19T08:15:11.874569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0007b0/r3tmp/yandexVLZUtx.tmp 2026-02-19T08:15:11.874720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0007b0/r3tmp/yandexVLZUtx.tmp 2026-02-19T08:15:11.874809Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:11.968991Z INFO: TTestServer started on Port 13921 GrpcPort 25230 PQClient connected to localhost:25230 === TenantModeEnabled() = 1 === Init PQ - start server on port 25230 2026-02-19T08:15:12.368970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:15:12.369119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.369273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:15:12.369304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:15:12.369425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 
281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:15:12.369474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:12.372166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:12.372248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:15:12.372357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.372395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:15:12.372421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-02-19T08:15:12.372442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 CreateParts -> ConfigureParts 2026-02-19T08:15:12.374078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:15:12.374133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-02-19T08:15:12.374157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:15:12.376043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.376077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:15:12.376095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 ConfigureParts -> Propose 2026-02-19T08:15:12.381104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.381153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.381171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:12.381187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-02-19T08:15:12.396508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:15:12.399076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-02-19T08:15:12.399195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:15:12.401684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771488912445, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:12.401874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488912445 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:15:12.401899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:12.402235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 Propose -> Done 2026-02-19T08:15:12.402292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:12.402467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-02-19T08:15:12.402524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-02-19T08:15:12.404682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:15:12.404710Z node 1 :FLAT_TX_SCHEMESHA ... 
erationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976710666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:59020" , at schemeshard: 72057594046644480 2026-02-19T08:15:25.486997Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710666:0, at schemeshard: 72057594046644480 2026-02-19T08:15:25.487101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2026-02-19T08:15:25.487113Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-02-19T08:15:25.487229Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2026-02-19T08:15:25.487255Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:15:25.487326Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710666:0 progress is 1/1 2026-02-19T08:15:25.487335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-02-19T08:15:25.487353Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710666:0 progress is 1/1 2026-02-19T08:15:25.487360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-02-19T08:15:25.487397Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-02-19T08:15:25.487435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710666, ready parts: 1/1, is published: false 2026-02-19T08:15:25.487452Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-02-19T08:15:25.487461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-02-19T08:15:25.487471Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710666:0 2026-02-19T08:15:25.487480Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710666, publications: 1, subscribers: 0 2026-02-19T08:15:25.487490Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710666, 
[OwnerId: 72057594046644480, LocalPathId: 46], 4 2026-02-19T08:15:25.489682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710666, response: Status: StatusSuccess TxId: 281474976710666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 ===Wait for session created with token with removed ACE to die2026-02-19T08:15:25.490039Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2026-02-19T08:15:25.490191Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:15:25.490205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710666, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-02-19T08:15:25.490379Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:15:25.490395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7608486977316423722:2400], at schemeshard: 72057594046644480, txId: 281474976710666, path id: 46 2026-02-19T08:15:25.491478Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710666 2026-02-19T08:15:25.491543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710666 2026-02-19T08:15:25.491554Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710666 2026-02-19T08:15:25.491569Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710666, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 4 2026-02-19T08:15:25.491584Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-02-19T08:15:25.491661Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710666, subscribers: 0 2026-02-19T08:15:25.496721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710666 2026-02-19T08:15:26.081442Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608487003086228792:2388], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:15:26.082257Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=NjgzNTU3NTUtMWYzZDYzZDQtNGUzMzg4MDctN2YzNmM3ZjA=, ActorId: [3:7608487003086228785:2384], ActorState: ExecuteState, LegacyTraceId: 01khtffpaz7b9659dqdb8awagj, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:15:26.082818Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2026-02-19T08:15:26.475161Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-02-19T08:15:26.476216Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: test-group-id|655aa339-716d07bf-a695c964-60467d19_0 describe result for acl check 2026-02-19T08:15:26.476343Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:828: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|655aa339-716d07bf-a695c964-60467d19_0 2026-02-19T08:15:26.476691Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|655aa339-716d07bf-a695c964-60467d19_0 is DEAD 2026-02-19T08:15:26.476990Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:15:27.116934Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608487007381196111:2395], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:15:27.117422Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=NDYyZmNkOTktOWZiN2E5NmMtZjAxMmNmYy1hNTU0ZDVkYg==, ActorId: [3:7608487007381196109:2394], ActorState: ExecuteState, LegacyTraceId: 01khtffqbeckhkndd89kxnksdp, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:15:27.117855Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2026-02-19T08:15:10.711421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486935144305608:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:10.711791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:10.801808Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:10.905211Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:11.357827Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:11.358021Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:11.413125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:11.512575Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:11.521884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:11.523074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:11.523125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:11.682019Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:11.687618Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:11.725856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:11.726003Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:11.726036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:11.748388Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:11.748556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:11.773824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:11.884909Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:12.182565Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:12.254156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:12.280986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0007af/r3tmp/yandexfygYrK.tmp 2026-02-19T08:15:12.281026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0007af/r3tmp/yandexfygYrK.tmp 2026-02-19T08:15:12.281224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0007af/r3tmp/yandexfygYrK.tmp 2026-02-19T08:15:12.281319Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:12.398279Z INFO: TTestServer started on Port 23321 GrpcPort 30508 PQClient connected to localhost:30508 === TenantModeEnabled() = 1 === Init PQ - start server on 
port 30508 2026-02-19T08:15:13.068850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:15:13.074777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.075028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:15:13.075054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:15:13.075244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:15:13.075296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:13.080901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:13.081057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:15:13.081212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.081253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:15:13.081269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-02-19T08:15:13.081288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710657:0 CreateParts -> ConfigureParts 2026-02-19T08:15:13.092518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:15:13.092556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-02-19T08:15:13.092585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:15:13.093189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.093219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:15:13.093253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710657:0 ConfigureParts -> Propose 2026-02-19T08:15:13.095198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.095242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.095270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-02-19T08:15:13.095301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-02-19T08:15:13.100037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:15:13.102220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-02-19T08:15:13.102334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:15:13.104922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771488913152, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:13.105056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488913152 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:15:13.105114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-02-19T08:15:13.105467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710657:0 Propose -> Done 2026-02-19T08:15:13.105507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-02-19T08:15:13.105668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: ... 
19T08:15:26.573368Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2026-02-19T08:15:26.573381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-02-19T08:15:26.573403Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2026-02-19T08:15:26.573414Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-02-19T08:15:26.573451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-02-19T08:15:26.573493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2026-02-19T08:15:26.573516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-02-19T08:15:26.573529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-02-19T08:15:26.573555Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2026-02-19T08:15:26.573568Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2026-02-19T08:15:26.573581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2026-02-19T08:15:26.582930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:15:26.583399Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2026-02-19T08:15:26.583556Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:15:26.583572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-02-19T08:15:26.583803Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:15:26.583819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7608486981460013241:2385], at schemeshard: 
72057594046644480, txId: 281474976715665, path id: 46 2026-02-19T08:15:26.585083Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2026-02-19T08:15:26.585175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2026-02-19T08:15:26.585189Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2026-02-19T08:15:26.585203Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2026-02-19T08:15:26.585223Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-02-19T08:15:26.585308Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2026-02-19T08:15:26.589246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2026-02-19T08:15:26.589289Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-02-19T08:15:26.589304Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-02-19T08:15:26.589665Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2026-02-19T08:15:26.589771Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:50366 2026-02-19T08:15:26.589792Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1556: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:50366 proto=v1 user=- topic=Topic /Root/acc/topic1 in database: /Root durationSec=0 2026-02-19T08:15:26.589801Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-02-19T08:15:26.590710Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-02-19T08:15:26.590887Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-02-19T08:15:26.590897Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; 
UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-02-19T08:15:26.590906Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-02-19T08:15:26.590943Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7608487002934850966:2379] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-02-19T08:15:26.590965Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-02-19T08:15:26.591474Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-02-19T08:15:26.591703Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-group-id|a28f9741-3d2c2abd-267f8808-97a7bcbf_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2026-02-19T08:15:26.592077Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|a28f9741-3d2c2abd-267f8808-97a7bcbf_0 2026-02-19T08:15:26.593369Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|a28f9741-3d2c2abd-267f8808-97a7bcbf_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-02-19T08:15:26.593562Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|a28f9741-3d2c2abd-267f8808-97a7bcbf_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-02-19T08:15:26.593599Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:828: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|a28f9741-3d2c2abd-267f8808-97a7bcbf_0 2026-02-19T08:15:26.593792Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|a28f9741-3d2c2abd-267f8808-97a7bcbf_0 is DEAD 2026-02-19T08:15:26.594055Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:15:27.082422Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608487007229818281:2383], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:15:27.084667Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=M2UzZWVmYWMtNWUxMThhOTItMjkzNjYwYmQtNTZjNTIwY2I=, ActorId: [3:7608487007229818279:2382], ActorState: ExecuteState, LegacyTraceId: 01khtffqakdhdf2hzwvw0whhdc, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:15:27.085428Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2026-02-19T08:15:10.352358Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486936848752393:2226];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:10.352652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:10.391346Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:10.393412Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486933992241279:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:10.404982Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:10.420012Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:10.861914Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:10.878032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:10.950977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:10.951107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:10.957615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:10.957729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:11.081137Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:11.081309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:11.098328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:11.179155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:11.182105Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:11.193860Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:11.381970Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:11.438130Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:11.672405Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008862s 2026-02-19T08:15:11.873991Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0007b3/r3tmp/yandexs9wuFU.tmp 2026-02-19T08:15:11.874020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0007b3/r3tmp/yandexs9wuFU.tmp 2026-02-19T08:15:11.874181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0007b3/r3tmp/yandexs9wuFU.tmp 2026-02-19T08:15:11.874263Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:11.980153Z INFO: TTestServer started on Port 62071 GrpcPort 18544 PQClient connected to localhost:18544 === TenantModeEnabled() = 1 === Init PQ - start server on port 18544 2026-02-19T08:15:12.824394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 
2026-02-19T08:15:12.830131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.830490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:15:12.830516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:15:12.830683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:15:12.830763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:12.838836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:12.839017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:15:12.839207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.839250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:15:12.839265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-02-19T08:15:12.839286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 CreateParts -> ConfigureParts 2026-02-19T08:15:12.845879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:15:12.845917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-02-19T08:15:12.845935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:15:12.847028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.847074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:15:12.847096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 
281474976715657:0 ConfigureParts -> Propose 2026-02-19T08:15:12.855049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.855131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:12.855160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:12.855202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-02-19T08:15:12.863624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:15:12.867730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-02-19T08:15:12.867818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:15:12.869726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771488912914, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:12.869826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488912914 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:15:12.869849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:12.870063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 Propose -> Done 2026-02-19T08:15:12.870089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:12.870212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-02-19T08:15:12.870247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-02-19T08:15:12.872267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPubl ... 
104857600 } } } 2026-02-19T08:15:27.165084Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 auth for : consumer_aba 2026-02-19T08:15:27.165701Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:142: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 Handle describe topics response 2026-02-19T08:15:27.165848Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 auth is DEAD 2026-02-19T08:15:27.165922Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1061: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 auth ok: topics# 1, initDone# 0 2026-02-19T08:15:27.167833Z :INFO: [/Root] [/Root] [93be9d1c-333db757-ae40510a-491664ae] [null] Server session id: consumer_aba_3_2_3029317449921160880_v1 2026-02-19T08:15:27.168099Z :DEBUG: [/Root] [/Root] [93be9d1c-333db757-ae40510a-491664ae] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:15:27.167148Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1232: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 register session: topic# /Root/account1/write_topic 2026-02-19T08:15:27.170461Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1353: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 user=- topic=Topic /Root/account1/write_topic in database: Root assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_3029317449921160880_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7608487009868294308 RawX2: 4503612512274778 } Path: "/Root/account1/write_topic" } 2026-02-19T08:15:27.170567Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1204: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2026-02-19T08:15:27.170967Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1022: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7608487009868294310:2397] 2026-02-19T08:15:27.168721Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037894][write_topic] pipe [3:7608487009868294308:2394] connected; active server actors: 1 2026-02-19T08:15:27.169185Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7608487009868294308:2394] session consumer_aba_3_2_3029317449921160880_v1 2026-02-19T08:15:27.169258Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2026-02-19T08:15:27.169319Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2026-02-19T08:15:27.169363Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037894][write_topic] consumer consumer_aba register 
reading session ReadingSession "consumer_aba_3_2_3029317449921160880_v1" (Sender=[3:7608487009868294305:2394], Pipe=[3:7608487009868294308:2394], Partitions=[], ActiveFamilyCount=0) 2026-02-19T08:15:27.169401Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2026-02-19T08:15:27.169464Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2026-02-19T08:15:27.169521Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_3029317449921160880_v1" (Sender=[3:7608487009868294305:2394], Pipe=[3:7608487009868294308:2394], Partitions=[], ActiveFamilyCount=0) 2026-02-19T08:15:27.169593Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_3029317449921160880_v1" sender [3:7608487009868294305:2394] lock partition 0 for ReadingSession "consumer_aba_3_2_3029317449921160880_v1" (Sender=[3:7608487009868294305:2394], Pipe=[3:7608487009868294308:2394], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2026-02-19T08:15:27.169666Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2026-02-19T08:15:27.169696Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000207s 2026-02-19T08:15:27.172013Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: consumer_aba_3_2_3029317449921160880_v1:1 with generation 1 2026-02-19T08:15:27.173912Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 grpc read done: success# 1, data# { read { } } 2026-02-19T08:15:27.174027Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1856: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 got read request: guid# 267e1148-dcd1e63d-aba9c0ca-ca5c53ea 2026-02-19T08:15:27.204115Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:903: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1771488927039 CreateTimestampMS: 1771488927035 SizeLag: 165 WriteTimestampEstimateMS: 1771488927039 ClientHasAnyCommits: false } Cookie: 2 } 2026-02-19T08:15:27.204207Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:631: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2026-02-19T08:15:27.204296Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1452: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 sending to client partition status Got new read 
session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2026-02-19T08:15:27.209872Z :INFO: [/Root] [/Root] [93be9d1c-333db757-ae40510a-491664ae] Closing read session. Close timeout: 0.000000s 2026-02-19T08:15:27.209945Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2026-02-19T08:15:27.209995Z :INFO: [/Root] [/Root] [93be9d1c-333db757-ae40510a-491664ae] Counters: { Errors: 0 CurrentSessionLifetimeMs: 54 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:15:27.210083Z :NOTICE: [/Root] [/Root] [93be9d1c-333db757-ae40510a-491664ae] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:15:27.210136Z :DEBUG: [/Root] [/Root] [93be9d1c-333db757-ae40510a-491664ae] [null] Abort session to cluster 2026-02-19T08:15:27.210681Z :NOTICE: [/Root] [/Root] [93be9d1c-333db757-ae40510a-491664ae] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:15:27.213375Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 grpc read done: success# 0, data# { } 2026-02-19T08:15:27.213406Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 grpc read failed 2026-02-19T08:15:27.213438Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 grpc closed 2026-02-19T08:15:27.213475Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer consumer_aba session consumer_aba_3_2_3029317449921160880_v1 is DEAD 2026-02-19T08:15:27.215325Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer_aba_3_2_3029317449921160880_v1 2026-02-19T08:15:27.219732Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037894][write_topic] pipe [3:7608487009868294308:2394] disconnected. 2026-02-19T08:15:27.219783Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037894][write_topic] pipe [3:7608487009868294308:2394] disconnected; active server actors: 1 2026-02-19T08:15:27.220334Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037894][write_topic] pipe [3:7608487009868294308:2394] client consumer_aba disconnected session consumer_aba_3_2_3029317449921160880_v1 2026-02-19T08:15:27.297075Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608487009868294323:2402], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:15:27.298111Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=OWJjOTUxNWEtOTQwNjUxNTUtNWQwNmNjZWEtYTI1ZDA5NjQ=, ActorId: [3:7608487009868294316:2398], ActorState: ExecuteState, LegacyTraceId: 01khtffqh757wss8qcmd2f5aq7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:15:27.298515Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2026-02-19T08:15:27.681190Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:15:27.681928Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/000a22/r3tmp/tmpO0mrN1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:15:27.682704Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/000a22/r3tmp/tmpO0mrN1/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/000a22/r3tmp/tmpO0mrN1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 62668570045339384 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:15:27.730943Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:27.731184Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:27.745538Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-02-19T08:15:27.745652Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-02-19T08:15:27.745686Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-02-19T08:15:27.745769Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-02-19T08:15:27.746022Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:27.746075Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-02-19T08:15:27.746116Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:27.746138Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-02-19T08:15:27.746231Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:27.763237Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488927 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:27.763540Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:27.763638Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488927 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:27.763974Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:27.764133Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:27.764300Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:27.764334Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:27.764427Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488927 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:27.764694Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:27.764723Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:27.764795Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488927 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:27.765407Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-02-19T08:15:27.765606Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:27.767262Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:27.767400Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:27.767549Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:27.767764Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:27.767858Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:27.768082Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:15:27.768252Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:27.768305Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:15:28.895894Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-02-19T08:15:28.896012Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-02-19T08:15:28.896935Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:29.181635Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2026-02-19T08:15:11.790636Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486940591109878:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:11.790898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:11.870077Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:11.870235Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:11.944768Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608486939245756915:2219];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:11.944818Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:11.990284Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:12.376757Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:12.445814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:12.509110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:12.509226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:12.510933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:12.511000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:12.526629Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:12.527620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:12.574748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:12.856879Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:12.857292Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:12.866942Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:12.925144Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:12.983082Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:13.132094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0007ae/r3tmp/yandexsUhYmz.tmp 2026-02-19T08:15:13.132124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0007ae/r3tmp/yandexsUhYmz.tmp 2026-02-19T08:15:13.138351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0007ae/r3tmp/yandexsUhYmz.tmp 2026-02-19T08:15:13.138476Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:13.258447Z INFO: TTestServer started on Port 14265 GrpcPort 12501 PQClient connected to localhost:12501 === TenantModeEnabled() = 1 === Init PQ - start server on port 12501 2026-02-19T08:15:13.728323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:15:13.728555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 
281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.728761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:15:13.728792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:15:13.728990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:15:13.729038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:13.731339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:13.731468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:15:13.731626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.731656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:15:13.731673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-02-19T08:15:13.731696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 CreateParts -> ConfigureParts 2026-02-19T08:15:13.733727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.733757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:15:13.733799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 ConfigureParts -> Propose 2026-02-19T08:15:13.735418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:15:13.735435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-02-19T08:15:13.735455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:15:13.735812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.735847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:13.735871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:13.735919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-02-19T08:15:13.757732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:15:13.759643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-02-19T08:15:13.759741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:15:13.766709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771488913803, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:13.766833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488913803 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:15:13.766869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:13.767110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 Propose -> Done 2026-02-19T08:15:13.767146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:15:13.767276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-02-19T08:15:13.767315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-02-19T0 ... 
lication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 2026-02-19T08:15:27.368081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7608487006924454603:2364] 2026-02-19T08:15:27.370204Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2026-02-19T08:15:27.370248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter 2026-02-19T08:15:27.483064Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-02-19T08:15:27.483093Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 1 === InitializeWritePQService Write 2026-02-19T08:15:27.483900Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2026-02-19T08:15:27.483984Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:56214 2026-02-19T08:15:27.484004Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1556: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:56214 proto=v1 user=- topic=Topic Root/acc/topic1 in database: /Root durationSec=0 2026-02-19T08:15:27.484014Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-02-19T08:15:27.487016Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 1 sessionId: describe result for acl check 2026-02-19T08:15:27.487139Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-02-19T08:15:27.487147Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-02-19T08:15:27.487159Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 
2026-02-19T08:15:27.487187Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7608487006924454789:2368] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-02-19T08:15:27.487202Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2026-02-19T08:15:27.487956Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-02-19T08:15:27.488162Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|ec68079-81026c50-121f5ec9-e6018049_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2026-02-19T08:15:27.488658Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|ec68079-81026c50-121f5ec9-e6018049_0 2026-02-19T08:15:27.489871Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: 12345678|ec68079-81026c50-121f5ec9-e6018049_0 grpc read done: success: 0 data: 2026-02-19T08:15:27.489889Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: 12345678|ec68079-81026c50-121f5ec9-e6018049_0 grpc read failed 2026-02-19T08:15:27.490043Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 1 sessionId: 12345678|ec68079-81026c50-121f5ec9-e6018049_0 2026-02-19T08:15:27.490057Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: 12345678|ec68079-81026c50-121f5ec9-e6018049_0 is DEAD 2026-02-19T08:15:27.490276Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed 2026-02-19T08:15:27.503238Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-02-19T08:15:27.503260Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-02-19T08:15:27.503655Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2026-02-19T08:15:27.503743Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:460: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:56214 2026-02-19T08:15:27.503763Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1556: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:56214 proto=v1 user=- topic=Topic topic1 in database: /Root/acc durationSec=0 2026-02-19T08:15:27.503773Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:584: init check schema 2026-02-19T08:15:27.506185Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:645: session v1 cookie: 2 sessionId: describe result for acl check 2026-02-19T08:15:27.506308Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-02-19T08:15:27.506317Z node 3 :PQ_PARTITION_CHOOSER DEBUG: 
partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-02-19T08:15:27.506324Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-02-19T08:15:27.506368Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7608487006924454809:2377] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-02-19T08:15:27.506399Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:707: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-02-19T08:15:27.507115Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-02-19T08:15:27.507300Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|1002753f-2e28cf7c-be84e40b-f0352e7d_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2026-02-19T08:15:27.507661Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|1002753f-2e28cf7c-be84e40b-f0352e7d_0 2026-02-19T08:15:27.510627Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: 12345678|1002753f-2e28cf7c-be84e40b-f0352e7d_0 grpc read done: success: 0 data: 2026-02-19T08:15:27.510643Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: 12345678|1002753f-2e28cf7c-be84e40b-f0352e7d_0 grpc read failed 2026-02-19T08:15:27.510663Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: 12345678|1002753f-2e28cf7c-be84e40b-f0352e7d_0 grpc closed 2026-02-19T08:15:27.510677Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: 12345678|1002753f-2e28cf7c-be84e40b-f0352e7d_0 is DEAD 2026-02-19T08:15:27.511306Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:15:28.106708Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608487011219422131:2386], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:15:28.108763Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=ZjVkN2RkZTItYTZkMzljYzctNGIzYTFkYzEtODJmNTAzNjk=, ActorId: [3:7608487011219422129:2385], ActorState: ExecuteState, LegacyTraceId: 01khtffrae239r1f71v6gbp2b6, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:15:28.109437Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2026-02-19T08:15:12.020516Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486945931938879:2270];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:12.020567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:12.150502Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:12.177624Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:12.591804Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:12.591929Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:12.682263Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:12.822423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:12.822534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:12.929762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:12.929875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:13.018884Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:13.030217Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:13.043396Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:13.065735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:13.065863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:13.083790Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:13.083952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:13.100846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:13.176092Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:13.506158Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:13.506501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0007ad/r3tmp/yandexYiC3DB.tmp 2026-02-19T08:15:13.506527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0007ad/r3tmp/yandexYiC3DB.tmp 2026-02-19T08:15:13.506682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0007ad/r3tmp/yandexYiC3DB.tmp 2026-02-19T08:15:13.506860Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:13.585901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:13.639918Z INFO: TTestServer started on Port 28340 GrpcPort 17987 PQClient connected to localhost:17987 === TenantModeEnabled() = 1 === Init PQ - start server on port 17987 2026-02-19T08:15:14.531394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:15:14.531624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:14.531831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:15:14.531855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:15:14.532030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:15:14.532071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:14.540713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:14.540857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:15:14.541039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:14.541091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:15:14.541108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-02-19T08:15:14.541132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710657:0 CreateParts -> ConfigureParts 2026-02-19T08:15:14.550615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:14.550662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 
2026-02-19T08:15:14.550691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710657:0 ConfigureParts -> Propose 2026-02-19T08:15:14.558589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:14.558628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:15:14.558669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-02-19T08:15:14.558700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-02-19T08:15:14.563112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:15:14.568589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:15:14.568618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-02-19T08:15:14.568642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:15:14.576415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-02-19T08:15:14.576547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:15:14.587928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771488914622, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:15:14.588086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488914622 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:15:14.588123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-02-19T08:15:14.588374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710657:0 Propose -> Done 2026-02-19T08:15:14.588408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-02-19T08:15:14.588588Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: ... : partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:28.889808Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:28.889819Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:15:28.981397Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:28.981429Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:28.981445Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:28.981464Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:28.981490Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:15:28.981557Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:28.981566Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:28.981573Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:28.981583Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:28.981589Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:28.983994Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:28.984021Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:28.984034Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:28.984061Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:28.984074Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.081727Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.081728Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.081755Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.081764Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.081767Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: 
[72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.081774Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.081787Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.081790Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.081801Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.081802Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.084224Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.084250Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.084259Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.084279Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.084290Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.182068Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.182071Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.182093Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.182107Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.182110Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.182125Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.182136Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.182139Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.182154Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.182167Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.184591Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.184626Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.184638Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: 
[72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.184655Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.184668Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.282429Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.282440Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.282462Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.282466Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.282476Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.282478Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.282494Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.282495Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.282509Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.282509Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.284933Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.284965Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.284981Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.284998Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.285009Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.382746Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.382764Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.382776Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.382785Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.382787Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.382796Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.382798Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.382806Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.382816Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.382833Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:29.385269Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:29.385294Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.385305Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:29.385323Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:29.385335Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> KqpRm::DisonnectNodes >> TCmsTest::StateRequest >> TestMalformedRequest::ContentLengthCorrect >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TAuditTest::AuditDisabledWithoutAppData [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::AuditDisabledWithoutAppData [GOOD] >> KqpRm::DisonnectNodes [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 >> TCmsTest::Notifications ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2026-02-19T08:15:32.589025Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:15:32.589670Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0002de/r3tmp/tmpb2NgZA/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2026-02-19T08:15:32.590324Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0002de/r3tmp/tmpb2NgZA/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0002de/r3tmp/tmpb2NgZA/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 667729145249678486 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:15:32.649697Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:32.650066Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-02-19T08:15:32.684502Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-02-19T08:15:32.684611Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-02-19T08:15:32.684647Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-02-19T08:15:32.684690Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-02-19T08:15:32.684823Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:32.684865Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-02-19T08:15:32.684896Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-02-19T08:15:32.684917Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-02-19T08:15:32.685004Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:32.696251Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488932 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:32.696499Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:32.696581Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488932 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:32.696860Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:32.696940Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:32.697050Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:32.697073Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:32.697156Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1771488932 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:32.697342Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-02-19T08:15:32.697368Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-02-19T08:15:32.697426Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1771488932 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-02-19T08:15:32.698034Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-02-19T08:15:32.698222Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:32.698563Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:32.698679Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:32.698805Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:32.699017Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:32.699102Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:32.699343Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:15:32.699477Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:15:32.699545Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:15:33.728296Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-02-19T08:15:33.728409Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-02-19T08:15:33.729006Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-02-19T08:15:33.729117Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-02-19T08:15:33.729196Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2026-02-19T08:15:33.729885Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:151:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-02-19T08:15:33.730048Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-02-19T08:15:33.730500Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2076] ServerId# [1:363:2280] TabletId# 72057594037932033 PipeClientId# [2:75:2076] 2026-02-19T08:15:33.730763Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:492: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-02-19T08:15:33.730805Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:167: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:473:2105], reason: tenant updated 2026-02-19T08:15:33.731196Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:33.734418Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:33.734558Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-02-19T08:15:34.079027Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps >> TCmsTest::TestKeepAvailableModeDisconnects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2026-02-19T08:13:46.171051Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:46.197658Z 
node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:46.198793Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:46.198859Z node 2 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:1:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:46.198930Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:46.199205Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:46.200172Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:51:2076] ControllerId# 72057594037932033 2026-02-19T08:13:46.200253Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:46.200350Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:46.200463Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:46.214268Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:46.214337Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:46.216747Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:60:2080] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.216922Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:61:2081] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.217059Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:62:2082] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.217229Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:63:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.217357Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:64:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.217484Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:65:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.217610Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:66:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.217637Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:46.219127Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap 
[2:51:2076] 2026-02-19T08:13:46.219180Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:51:2076] 2026-02-19T08:13:46.219252Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:46.219318Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:46.219662Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:46.222629Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:46.223099Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:46.223176Z node 1 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:0:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:46.223230Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:46.223463Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-02-19T08:13:46.224662Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:301} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DDisk# false VDiskServiceId# [1:5980451682:0] 2026-02-19T08:13:46.224722Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:46.225592Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:75:2077] ControllerId# 72057594037932033 2026-02-19T08:13:46.225625Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:46.225689Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:46.225804Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:46.253864Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:46.253929Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:46.255523Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:83:2082] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.255676Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:84:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.255802Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 
Actor# [1:74:2076] Create Queue# [1:85:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.255921Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:86:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.256035Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:87:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.256146Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:88:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.256288Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:89:2088] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:46.256317Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:46.256371Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:75:2077] 2026-02-19T08:13:46.256398Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:75:2077] 2026-02-19T08:13:46.256437Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:46.256473Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:46.261744Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:51:2076] 2026-02-19T08:13:46.261821Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:46.262050Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:46.262292Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:46.262514Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2026-02-19T08:13:46.262559Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:46.263138Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:46.263242Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:46.263430Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:46.263460Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-02-19T08:13:46.314240Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:46.316319Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:13:46.316487Z node 1 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:99:2092] 2026-02-19T08:13:46.316523Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:99:2092] 2026-02-19T08:13:46.316624Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:46.316844Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:13:46.316947Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:46.317037Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2026-02-19T08:13:46.317059Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2026-02-19T08:13:46.317078Z node 1 :BS_NODE DEBUG: {NWDC32@distconf ... : SelectForward node: 34 selfDC: 2 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 72057594046678944 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [33:334:2203] 2026-02-19T08:15:34.305636Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594046678944] forward result remote node 33 [34:559:2161] 2026-02-19T08:15:34.305826Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594046678944] remote node connected [34:559:2161] 2026-02-19T08:15:34.305912Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594046678944]::SendEvent [34:559:2161] 2026-02-19T08:15:34.306263Z node 33 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594046678944] Accept Connect Originator# [34:559:2161] 2026-02-19T08:15:34.306649Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594046678944] connected with status OK role: Leader [34:559:2161] 2026-02-19T08:15:34.306721Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594046678944] send queued [34:559:2161] 2026-02-19T08:15:34.306834Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046678944] send [34:559:2161] 2026-02-19T08:15:34.306869Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594046678944] push event to server [34:559:2161] 2026-02-19T08:15:34.306956Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594046678944]::SendEvent [34:559:2161] 2026-02-19T08:15:34.307144Z node 33 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594046678944] Push Sender# [34:558:2161] EventType# 271122945 2026-02-19T08:15:34.307304Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2026-02-19T08:15:34.307398Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:15:34.307704Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ 
}, 0 gb} 2026-02-19T08:15:34.307810Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:15:34.309301Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [34:565:2162] 2026-02-19T08:15:34.309342Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [34:565:2162] 2026-02-19T08:15:34.309386Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [34:566:2163] 2026-02-19T08:15:34.309413Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [34:566:2163] 2026-02-19T08:15:34.309586Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [34:565:2162] 2026-02-19T08:15:34.309652Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037888] queue send [34:566:2163] 2026-02-19T08:15:34.309903Z node 34 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [33:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:34.309970Z node 34 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 34 selfDC: 2 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 72057594037927937 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [33:333:2202] 2026-02-19T08:15:34.310096Z node 34 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:34.310278Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594037927937] forward result remote node 33 [34:565:2162] 2026-02-19T08:15:34.310539Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594037927937] remote node connected [34:565:2162] 2026-02-19T08:15:34.310582Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594037927937]::SendEvent [34:565:2162] 2026-02-19T08:15:34.310894Z node 34 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:15:34.311521Z node 33 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [34:565:2162] 2026-02-19T08:15:34.311909Z node 33 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-02-19T08:15:34.311972Z node 33 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-02-19T08:15:34.312046Z node 33 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-02-19T08:15:34.312268Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [34:565:2162] 2026-02-19T08:15:34.312309Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [34:565:2162] 2026-02-19T08:15:34.312348Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037927937] push event to server 
[34:565:2162] 2026-02-19T08:15:34.312474Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594037927937]::SendEvent [34:565:2162] 2026-02-19T08:15:34.312828Z node 34 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [33:471:2304] CurrentLeaderTablet: [33:487:2315] CurrentGeneration: 1 CurrentStep: 0} 2026-02-19T08:15:34.312913Z node 34 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [33:471:2304] CurrentLeaderTablet: [33:487:2315] CurrentGeneration: 1 CurrentStep: 0} 2026-02-19T08:15:34.313003Z node 34 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 72075186224037888 leader: [33:471:2304] followers: 0 2026-02-19T08:15:34.313068Z node 34 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 34 selfDC: 2 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 72075186224037888 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [33:471:2304] 2026-02-19T08:15:34.313129Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037888] forward result remote node 33 [34:566:2163] 2026-02-19T08:15:34.313309Z node 33 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [34:562:2162] EventType# 268959744 2026-02-19T08:15:34.313581Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-02-19T08:15:34.313678Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:15:34.313925Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:34.314056Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 204b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-02-19T08:15:34.314158Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:15:34.314409Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037888] remote node connected [34:566:2163] 2026-02-19T08:15:34.314455Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72075186224037888]::SendEvent [34:566:2163] 2026-02-19T08:15:34.314845Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-02-19T08:15:34.314931Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:15:34.315057Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:15:34.315152Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:15:34.315578Z node 33 :PIPE_SERVER DEBUG: 
tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [34:566:2163] 2026-02-19T08:15:34.315886Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037888] connected with status OK role: Leader [34:566:2163] 2026-02-19T08:15:34.315924Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [34:566:2163] 2026-02-19T08:15:34.315958Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72075186224037888] push event to server [34:566:2163] 2026-02-19T08:15:34.316021Z node 34 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72075186224037888]::SendEvent [34:566:2163] 2026-02-19T08:15:34.316173Z node 33 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72075186224037888] Push Sender# [34:563:2163] EventType# 268959744 2026-02-19T08:15:34.316310Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-02-19T08:15:34.316369Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:15:34.316547Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:34.316655Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:34.316718Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-02-19T08:15:34.316772Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:15:34.316968Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-02-19T08:15:34.317016Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:15:34.317094Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:15:34.317134Z node 33 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TCmsTest::StateStorageTwoRings >> TCmsTest::Notifications [GOOD] >> TCmsTest::PermissionDuration >> THiveTest::TestHiveBalancerOneTabletHighUsage [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> KqpScheme::ParallelDropStreamingQuery [GOOD] >> TCmsTenatsTest::TestTenantLimit >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestKeepAvailableModeScheduled >> 
TestMalformedRequest::ContentLengthCorrect [GOOD] >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::StateStorageNodesFromOneRing |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete >> StatisticsSaveLoad::Simple >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 >> StatisticsSaveLoad::ForbidAccess >> TestMalformedRequest::ContentLengthLower >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::PriorityRange >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::ParallelDropStreamingQuery [GOOD] Test command err: 2026-02-19T08:13:48.359676Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486584658251828:2149];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:48.360155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:48.413289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:13:48.709838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:13:48.743770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:48.743883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:48.800409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:48.840442Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:48.841854Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486584658251716:2081] 1771488828352108 != 1771488828352111 2026-02-19T08:13:48.927579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:13:48.956271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-02-19T08:13:48.956290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:48.956296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:48.956378Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:49.377302Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:49.401110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:49.463422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:49.629187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:49.801836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:49.889962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:51.765739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486597543155480:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.766021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.766399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486597543155490:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:51.766526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.175047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.218769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.268297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.301508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.337681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.401111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.478009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.523826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:13:52.615294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486601838123672:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.615369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.615580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486601838123678:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.615609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.615672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486601838123677:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:13:52.619240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:13:52.629682Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486601838123681:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:13:52.698393Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486601838123732:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:13:53.356745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486584658251828:2149];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:53.356845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:13:55.346358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ty ... 2011 severity: 1 issues { message: "
: Error: Prepare streaming query before operation failed\n
: Error: Lock streaming query failed\n
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later\n" issue_code: 2011 severity: 1 } } trace_id# 2026-02-19T08:15:36.584367Z node 8 :KQP_PROXY WARN: query_actor.cpp:376: [TQueryBase] [TLockStreamingQueryRequestActor] OwnerId: [8:7608487036989611319:8914], ActorId: [8:7608487045579550337:5049], TraceId: /Root/MyFolder/MyStreamingQuery, State: Get query info, Finish with ABORTED, Issues: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later }, SessionId: ydb://session/3?node_id=8&id=NGJlN2VjMzItZjA4ZGQxNDQtNDlkMzE4NzMtYzVmYTg4ZDQ=, TxId: 01khtfg0j74rgdeaqvxkdc9368 2026-02-19T08:15:36.584554Z node 8 :KQP_PROXY WARN: queries.cpp:352: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [8:7608487036989610991:8741] QueryPath: /Root/MyFolder/MyStreamingQuery. Prepare streaming query before operation [8:7608487036989611026:8772] failed ABORTED, issues: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } 2026-02-19T08:15:36.584594Z node 8 :KQP_PROXY WARN: query_actor.cpp:376: [TQueryBase] [TLockStreamingQueryRequestActor] OwnerId: [8:7608487036989611323:8917], ActorId: [8:7608487045579550366:5029], TraceId: /Root/MyFolder/MyStreamingQuery, State: Get query info, Finish with ABORTED, Issues: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later }, SessionId: ydb://session/3?node_id=8&id=OWNlYTYyNjgtZGE2NTU5NTMtNzZjMWRlMzUtNWEyMmVjNzY=, TxId: 01khtfg0jabq5pv369g6r9rgnc 2026-02-19T08:15:36.584641Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=8&id=YTYwYWFmMGQtN2VkNGIyNDktNmI4ZDJmOTMtNjIyNGU4OGI=, ActorId: [8:7608487036989611072:3192], ActorState: ExecuteState, LegacyTraceId: 01khtffyrg4g6xxyh8e38pxb1c, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Executing operation with object \"STREAMING_QUERY\"" issue_code: 2011 severity: 1 issues { message: "
: Error: Prepare streaming query before operation failed\n
: Error: Lock streaming query failed\n
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later\n" issue_code: 2011 severity: 1 } } trace_id# 2026-02-19T08:15:36.584689Z node 8 :KQP_PROXY WARN: queries.cpp:367: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [8:7608487036989610991:8741] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed ABORTED, with issues: {
: Error: Prepare streaming query before operation failed subissue: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } } 2026-02-19T08:15:36.584750Z node 8 :KQP_PROXY WARN: queries.cpp:352: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [8:7608487036989611146:8832] QueryPath: /Root/MyFolder/MyStreamingQuery. Prepare streaming query before operation [8:7608487036989611169:8853] failed ABORTED, issues: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } 2026-02-19T08:15:36.584856Z node 8 :KQP_PROXY WARN: queries.cpp:367: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [8:7608487036989611146:8832] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed ABORTED, with issues: {
: Error: Prepare streaming query before operation failed subissue: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } } 2026-02-19T08:15:36.585111Z node 8 :KQP_PROXY WARN: queries.cpp:352: [StreamingQueries] [TLockStreamingQueryTableActor] OwnerId: [8:7608487036989611308:8905] ActorId: [8:7608487036989611317:8912] QueryPath: /Root/MyFolder/MyStreamingQuery. Lock streaming query [8:7608487045579550337:5049] failed ABORTED, issues: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } 2026-02-19T08:15:36.585112Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=8&id=ZjlhZDFlMDQtNDkzYjY5NTQtMzM5MTYyNDItZTM1YWQ5M2Q=, ActorId: [8:7608487036989610860:3145], ActorState: ExecuteState, LegacyTraceId: 01khtffyqt9k1wv3b1pv37003r, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Executing operation with object \"STREAMING_QUERY\"" issue_code: 2011 severity: 1 issues { message: "
: Error: Prepare streaming query before operation failed\n
: Error: Lock streaming query failed\n
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later\n" issue_code: 2011 severity: 1 } } trace_id# 2026-02-19T08:15:36.585176Z node 8 :KQP_PROXY WARN: queries.cpp:352: [StreamingQueries] [TLockStreamingQueryTableActor] OwnerId: [8:7608487036989611309:8906] ActorId: [8:7608487036989611321:8916] QueryPath: /Root/MyFolder/MyStreamingQuery. Lock streaming query [8:7608487045579550366:5029] failed ABORTED, issues: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } 2026-02-19T08:15:36.585182Z node 8 :KQP_PROXY WARN: queries.cpp:367: [StreamingQueries] [TLockStreamingQueryTableActor] OwnerId: [8:7608487036989611308:8905] ActorId: [8:7608487036989611317:8912] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed ABORTED, with issues: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } 2026-02-19T08:15:36.585245Z node 8 :KQP_PROXY WARN: queries.cpp:367: [StreamingQueries] [TLockStreamingQueryTableActor] OwnerId: [8:7608487036989611309:8906] ActorId: [8:7608487036989611321:8916] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed ABORTED, with issues: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } 2026-02-19T08:15:36.585305Z node 8 :KQP_PROXY WARN: queries.cpp:352: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [8:7608487036989611308:8905] QueryPath: /Root/MyFolder/MyStreamingQuery. Prepare streaming query before operation [8:7608487036989611317:8912] failed ABORTED, issues: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } 2026-02-19T08:15:36.585340Z node 8 :KQP_PROXY WARN: queries.cpp:352: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [8:7608487036989611309:8906] QueryPath: /Root/MyFolder/MyStreamingQuery. Prepare streaming query before operation [8:7608487036989611321:8916] failed ABORTED, issues: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } 2026-02-19T08:15:36.585383Z node 8 :KQP_PROXY WARN: queries.cpp:367: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [8:7608487036989611308:8905] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed ABORTED, with issues: {
: Error: Prepare streaming query before operation failed subissue: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } } 2026-02-19T08:15:36.585392Z node 8 :KQP_PROXY WARN: queries.cpp:367: [StreamingQueries] [TDropStreamingQueryActor] OwnerId: [0:0:0] ActorId: [8:7608487036989611309:8906] QueryPath: /Root/MyFolder/MyStreamingQuery. Failed ABORTED, with issues: {
: Error: Prepare streaming query before operation failed subissue: {
: Error: Lock streaming query failed subissue: {
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later } } } 2026-02-19T08:15:36.586480Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=8&id=ZWFmYWNiOWEtN2I2NjNiNC04YTUyZmFkNS05MDQ1ZWIzNA==, ActorId: [8:7608487036989611082:3196], ActorState: ExecuteState, LegacyTraceId: 01khtffyrxa2hfm1k5fvczpq3w, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Executing operation with object \"STREAMING_QUERY\"" issue_code: 2011 severity: 1 issues { message: "
: Error: Prepare streaming query before operation failed\n
: Error: Lock streaming query failed\n
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later\n" issue_code: 2011 severity: 1 } } trace_id# 2026-02-19T08:15:36.587212Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=8&id=NTUxNTg3NmQtNGJhODUxZDAtYTFiMDgxMGEtZDgwNjQ5ZDg=, ActorId: [8:7608487036989611104:3204], ActorState: ExecuteState, LegacyTraceId: 01khtffys42t3evmdkyceq7vp4, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Executing operation with object \"STREAMING_QUERY\"" issue_code: 2011 severity: 1 issues { message: "
: Error: Prepare streaming query before operation failed\n
: Error: Lock streaming query failed\n
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later\n" issue_code: 2011 severity: 1 } } trace_id# 2026-02-19T08:15:36.587427Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=8&id=ODQ0OTQ5NGEtOTZhMDc0N2EtZTc4ODJmYjMtZWQ0MjI0NjE=, ActorId: [8:7608487036989611101:3203], ActorState: ExecuteState, LegacyTraceId: 01khtffys11fcm6cpajf2qh1hf, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Executing operation with object \"STREAMING_QUERY\"" issue_code: 2011 severity: 1 issues { message: "
: Error: Prepare streaming query before operation failed\n
: Error: Lock streaming query failed\n
: Error: Streaming query /Root/MyFolder/MyStreamingQuery already under operation DROP STREAMING QUERY started at 2026-02-19T08:15:34.391371Z, try repeat request later\n" issue_code: 2011 severity: 1 } } trace_id# >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageTwoBrokenRings >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TCmsTest::TestKeepAvailableMode >> TCmsTest::DynamicConfig >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> TCmsTest::ManualRequestApproval >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TCmsTest::PriorityRange [GOOD] >> TCmsTest::PriorityLocks ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2026-02-19T08:15:09.759660Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486929388055969:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:09.760832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:09.840930Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:09.966368Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:15:10.513934Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:10.514065Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:10.541503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:10.602015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:10.602093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:10.607549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:10.607628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:10.621054Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:10.621336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:10.631294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-02-19T08:15:10.835275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:10.837826Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:10.837967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:10.852990Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:10.870042Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:11.309974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0007b2/r3tmp/yandexyvS7M4.tmp 2026-02-19T08:15:11.310001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0007b2/r3tmp/yandexyvS7M4.tmp 2026-02-19T08:15:11.310176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0007b2/r3tmp/yandexyvS7M4.tmp 2026-02-19T08:15:11.310268Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:11.360634Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:11.383731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:11.417897Z INFO: TTestServer started on Port 25056 GrpcPort 25558 PQClient connected to localhost:25558 2026-02-19T08:15:11.894427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:12.094409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:14.763081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486929388055969:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:14.763165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:15:16.099241Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486959452828422:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:16.106198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:16.106959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486959452828435:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:16.107021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486959452828436:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:16.107269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:16.111979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:16.142496Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486959452828439:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:15:16.230267Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486959452828528:3068] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:16.581209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:16.620194Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608486959452828538:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:15:16.622168Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=OGE0NzMwYzAtZDI5Y2RmNjgtZDFjZTkyZDItOGNhMzA2MWY=, ActorId: [1:7608486959452828420:2339], ActorState: ExecuteState, LegacyTraceId: 01khtffckw21ywr7ep5dr3fdvw, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:15:16.624566Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:15:16.735863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:16.947032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608486963747796285: ... 
: partition.cpp:2366: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.733043Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.733054Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037904][Partition][0][StateIdle] Try persist 2026-02-19T08:15:39.761160Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:15:39.761196Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.761206Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.761225Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.761237Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][3][StateIdle] Try persist 2026-02-19T08:15:39.761656Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:39.761669Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:15:39.761679Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.761689Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.761693Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.761700Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.761703Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.761729Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:15:39.761754Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.761765Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][4][StateIdle] Try persist 2026-02-19T08:15:39.769865Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:39.769900Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.769914Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.769931Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.769944Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037900][Partition][0][StateIdle] Try persist 2026-02-19T08:15:39.770872Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2358: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:15:39.770911Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.770931Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.770956Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.770973Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][3][StateIdle] Try persist 2026-02-19T08:15:39.772423Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:15:39.772462Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.772485Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.772509Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.772527Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][4][StateIdle] Try persist 2026-02-19T08:15:39.827237Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:15:39.827276Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.827288Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.827311Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.827324Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037903][Partition][3][StateIdle] Try persist 2026-02-19T08:15:39.829780Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:15:39.829813Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.829823Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.829840Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.829852Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037903][Partition][4][StateIdle] Try persist 2026-02-19T08:15:39.833942Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:39.833978Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.833989Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.834008Z node 3 
:PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.834023Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037904][Partition][0][StateIdle] Try persist 2026-02-19T08:15:39.861543Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:15:39.861580Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.861593Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.861613Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.861627Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][3][StateIdle] Try persist 2026-02-19T08:15:39.861986Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:15:39.861988Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:39.862006Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.862007Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.862015Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.862016Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.862026Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.862029Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.862034Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][4][StateIdle] Try persist 2026-02-19T08:15:39.862039Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:15:39.870250Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:39.870287Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.870298Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.870315Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.870330Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037900][Partition][0][StateIdle] Try persist 2026-02-19T08:15:39.871181Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:15:39.871200Z node 3 :PERSQUEUE 
DEBUG: partition.cpp:2417: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.871209Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.871221Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.871230Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][3][StateIdle] Try persist 2026-02-19T08:15:39.872752Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:15:39.872780Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.872790Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.872802Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.872812Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][4][StateIdle] Try persist >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2026-02-19T08:11:22.292769Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485955150128761:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:22.293067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:22.798810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:22.798958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:22.830448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:22.857375Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485955150128545:2081] 1771488682267031 != 1771488682267034 2026-02-19T08:11:22.866601Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:22.871671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:22.988863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:22.999310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:22.999333Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:22.999348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:22.999476Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:23.024524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:23.031563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:23.048720Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7608485959445096682:2300] 2026-02-19T08:11:23.049001Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:23.061546Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:23.061629Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:23.063437Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:23.063504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:23.063549Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:23.063934Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:23.063969Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:23.064017Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:7608485959445096697:2300] in generation 1 2026-02-19T08:11:23.077501Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:23.128041Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:23.128178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:23.128236Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:7608485959445096699:2301] 2026-02-19T08:11:23.128250Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:23.128274Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:11:23.128294Z node 1 
:TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:23.128490Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:23.128566Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:23.128588Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:23.128607Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:23.128639Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:23.128664Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:23.129433Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:7608485959445096680:2497], serverId# [1:7608485959445096683:2498], sessionId# [0:0:0] 2026-02-19T08:11:23.129533Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:23.129855Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:23.129929Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:11:23.131266Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:23.131353Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:23.131414Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:23.133220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:7608485959445096713:2514], serverId# [1:7608485959445096714:2515], sessionId# [0:0:0] 2026-02-19T08:11:23.138663Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1771488683181 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488683181 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:23.138696Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:23.138819Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:23.138881Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:23.138894Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:23.138929Z node 1 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:333: Found ready operation [1771488683181:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:23.139175Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1771488683181:281474976715657 keys extracted: 0 2026-02-19T08:11:23.139323Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:23.139427Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:23.139460Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:23.141539Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:23.143001Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:23.144580Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1771488683180 2026-02-19T08:11:23.144593Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:23.144621Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1771488683188 2026-02-19T08:11:23.144688Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1771488683181} 2026-02-19T08:11:23.144719Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:23.144766Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:23.144787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:23.144809Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:23.144844Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1771488683181 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7608485955150128904:2145], exec latency: 3 ms, propose latency: 5 ms 2026-02-19T08:11:23.144873Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:11:23.144913Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:23.147215Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:7608485959445096699:2301][Inactive] Handle NKikimrChangeEx ... 
topic 'Table/Stream/streamImpl' partition 0 2026-02-19T08:15:39.263769Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:2077: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2026-02-19T08:15:39.263923Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-02-19T08:15:39.264042Z node 29 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:39.264138Z node 29 :PERSQUEUE DEBUG: partition.cpp:2452: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-02-19T08:15:39.264240Z node 29 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:15:39.264338Z node 29 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.264442Z node 29 :PERSQUEUE DEBUG: partition.cpp:2516: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-02-19T08:15:39.264580Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2026-02-19T08:15:39.264697Z node 29 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:15:39.264786Z node 29 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:15:39.264897Z node 29 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:39.264996Z node 29 :PERSQUEUE INFO: partition_write.cpp:1746: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2026-02-19T08:15:39.265203Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2026-02-19T08:15:39.266332Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2026-02-19T08:15:39.267675Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 5 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000? size 93 WTime 8979 2026-02-19T08:15:39.268301Z node 29 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:15:39.268662Z node 29 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2026-02-19T08:15:39.270228Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 0 offset 5 count 1 size 93 actorID [29:1022:2840] 2026-02-19T08:15:39.270539Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' size 93 2026-02-19T08:15:39.270675Z node 29 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:15:39.281317Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:492: [72075186224037889][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-02-19T08:15:39.281538Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037889][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:15:39.281757Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037889][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-02-19T08:15:39.281933Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037889][Partition][0][StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2026-02-19T08:15:39.282453Z node 29 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:39.282560Z node 29 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.282655Z node 29 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:39.282746Z node 29 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:39.282839Z node 29 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:39.282971Z node 29 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037889][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:15:39.283137Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2026-02-19T08:15:39.283617Z node 29 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][29:1115:2882] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2026-02-19T08:15:39.283842Z node 29 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037888:1][29:1051:2882] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2026-02-19T08:15:39.284110Z node 29 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2026-02-19T08:15:39.284202Z node 29 :TX_DATASHARD DEBUG: datashard.cpp:1099: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2026-02-19T08:15:39.284970Z node 29 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 
72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-02-19T08:15:39.393449Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'streamImpl' requestId: 2026-02-19T08:15:39.393581Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-02-19T08:15:39.393900Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 10 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2026-02-19T08:15:39.395838Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 10 added 6 blobs, size 763 count 6 last offset 5, current partition end offset: 6 2026-02-19T08:15:39.395944Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 10. Send blob request. 2026-02-19T08:15:39.396142Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 93 accessed 5 times before, last time 1970-01-01T00:00:06.000000Z 2026-02-19T08:15:39.396224Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 1 partno 0 count 1 parts_count 0 source 1 size 174 accessed 2 times before, last time 1970-01-01T00:00:06.000000Z 2026-02-19T08:15:39.396259Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 2 partno 0 count 1 parts_count 0 source 1 size 93 accessed 1 times before, last time 1970-01-01T00:00:06.000000Z 2026-02-19T08:15:39.396292Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-02-19T08:15:39.396385Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 4 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-02-19T08:15:39.396463Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 5 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-02-19T08:15:39.396579Z node 29 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 10. All 6 blobs are from cache. 2026-02-19T08:15:39.396768Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:15:39.396854Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:15:39.396900Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:15:39.396945Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:15:39.396990Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:15:39.397033Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:15:39.397199Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 6 blobs 2026-02-19T08:15:39.397630Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-02-19T08:15:39.397786Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 1 totakecount 1 count 1 size 154 from pos 0 cbcount 1 2026-02-19T08:15:39.397868Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 2 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-02-19T08:15:39.397938Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2026-02-19T08:15:39.398005Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2026-02-19T08:15:39.398074Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-02-19T08:15:39.398302Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestForceRestartMode >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> TCmsTest::ManualRequestApproval [GOOD] >> TCmsTest::ManualRequestApprovalLockingAllNodes >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy >> TCmsTest::TestLoadLog [GOOD] >> TestMalformedRequest::ContentLengthLower [GOOD] >> TCmsTest::PriorityLocks [GOOD] >> TCmsTest::ActionWithZeroDuration >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::LastRefreshTime |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLoadLog [GOOD] >> TestMalformedRequest::ContentLengthHigher >> Cdc::InitialScanAndResolvedTimestamps [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityLocks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-02-19T08:15:05.336415Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:129:2159]: 
NKikimr::TEvTablet::TEvBoot 2026-02-19T08:15:05.341526Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:15:05.342162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:15:05.374975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:15:05.375293Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:15:05.383448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:15:05.383934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:15:05.384190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:15:05.384304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:15:05.384405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:15:05.384535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:15:05.384647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:15:05.384772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:15:05.384874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:15:05.384980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:15:05.385104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:15:05.385229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:15:05.385360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:15:05.388521Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828684, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:15:05.415846Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:15:05.416044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:15:05.416103Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:15:05.416312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:05.416483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:15:05.416563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:15:05.416610Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:15:05.416714Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:15:05.416786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:15:05.416832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:15:05.416863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:15:05.417062Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:05.417132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:15:05.417174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:15:05.417216Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:15:05.417310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:15:05.417366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:15:05.417420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:15:05.417474Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:15:05.417554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:15:05.417601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:15:05.417630Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:15:05.417674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:15:05.417735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:15:05.417770Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:15:05.417978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:15:05.418066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:15:05.418099Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:15:05.418234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:15:05.418294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:15:05.418333Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:15:05.418384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:15:05.418444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:15:05.418477Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:15:05.418524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:15:05.418576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.282ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.639ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.244ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.032ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.165ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.224ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.6ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:15:44.033476Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.282ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.639ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.244ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.032ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.165ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.224ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.6ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:15:44.033758Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:15:44.034007Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.282ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.639ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.244ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.032ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.165ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.224ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.6ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:15:44.034237Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.282ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.639ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.244ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.032ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.165ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.224ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.6ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:15:44.034748Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [5:618:2623] finished for tablet 9437184 2026-02-19T08:15:44.035566Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":39269163,"name":"_full_task","f":39269163,"d_finished":0,"c":0,"l":39287216,"d":18053},"events":[{"name":"bootstrap","f":39269495,"d_finished":1773,"c":1,"l":39271268,"d":1773},{"a":39286110,"name":"ack","f":39280851,"d_finished":4836,"c":2,"l":39285897,"d":5942},{"a":39286070,"name":"processing","f":39271494,"d_finished":9241,"c":5,"l":39285903,"d":10387},{"name":"ProduceResults","f":39270591,"d_finished":6721,"c":9,"l":39286645,"d":6721},{"a":39286655,"name":"Finish","f":39286655,"d_finished":0,"c":0,"l":39287216,"d":561},{"name":"task_result","f":39271530,"d_finished":4256,"c":3,"l":39280603,"d":4256}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.282ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.639ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.244ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.032ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.165ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.224ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.6ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:15:44.035684Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:15:44.036484Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":39269163,"name":"_full_task","f":39269163,"d_finished":0,"c":0,"l":39288125,"d":18962},"events":[{"name":"bootstrap","f":39269495,"d_finished":1773,"c":1,"l":39271268,"d":1773},{"a":39286110,"name":"ack","f":39280851,"d_finished":4836,"c":2,"l":39285897,"d":6851},{"a":39286070,"name":"processing","f":39271494,"d_finished":9241,"c":5,"l":39285903,"d":11296},{"name":"ProduceResults","f":39270591,"d_finished":6721,"c":9,"l":39286645,"d":6721},{"a":39286655,"name":"Finish","f":39286655,"d_finished":0,"c":0,"l":39288125,"d":1470},{"name":"task_result","f":39271530,"d_finished":4256,"c":3,"l":39280603,"d":4256}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.014ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.282ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.639ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.244ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.032ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.165ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.224ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.6ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:15:44.036640Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:15:44.015019Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-02-19T08:15:44.036705Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:15:44.036928Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> TCmsTest::ManualRequestApprovalLockingAllNodes [GOOD] >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved >> TCmsTest::RequestRestartServicesOk >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2026-02-19T08:13:42.406977Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:42.443385Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } PDisks { NodeID: 4 PDiskID: 1 Path: "SectorMap:3:3200" PDiskGuid: 4 } PDisks { NodeID: 5 PDiskID: 1 Path: "SectorMap:4:3200" PDiskGuid: 5 } PDisks { NodeID: 6 PDiskID: 1 Path: "SectorMap:5:3200" PDiskGuid: 6 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:42.444480Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:42.444550Z node 1 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:0:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:42.444622Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:42.444947Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false 
PDisksWaitingToStart# false 2026-02-19T08:13:42.446106Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:301} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DDisk# false VDiskServiceId# [1:5980451682:0] 2026-02-19T08:13:42.446164Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:42.447104Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:159:2080] ControllerId# 72057594037932033 2026-02-19T08:13:42.447147Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:42.447254Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:42.447386Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:42.459704Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:42.459774Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:42.462028Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:158:2079] Create Queue# [1:167:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.462234Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:158:2079] Create Queue# [1:168:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.462361Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:158:2079] Create Queue# [1:169:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.462491Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:158:2079] Create Queue# [1:170:2088] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.462613Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:158:2079] Create Queue# [1:171:2089] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.462773Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:158:2079] Create Queue# [1:172:2090] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.462903Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:158:2079] Create Queue# [1:173:2091] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.462938Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:42.463049Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:159:2080] 2026-02-19T08:13:42.463089Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:159:2080] 2026-02-19T08:13:42.463154Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:42.463215Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:42.464211Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:42.464299Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:42.467098Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: 
"SectorMap:2:3200" PDiskGuid: 3 } PDisks { NodeID: 4 PDiskID: 1 Path: "SectorMap:3:3200" PDiskGuid: 4 } PDisks { NodeID: 5 PDiskID: 1 Path: "SectorMap:4:3200" PDiskGuid: 5 } PDisks { NodeID: 6 PDiskID: 1 Path: "SectorMap:5:3200" PDiskGuid: 6 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:42.467535Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:42.467578Z node 2 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:1:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:42.467630Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:42.467860Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:42.468805Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:184:2080] ControllerId# 72057594037932033 2026-02-19T08:13:42.468837Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:42.468897Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:42.469000Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:42.478180Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:42.478230Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:42.482469Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:183:2079] Create Queue# [2:191:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.482629Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:183:2079] Create Queue# [2:192:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.482780Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:183:2079] Create Queue# [2:193:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.482958Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:183:2079] Create Queue# [2:194:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.483117Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:183:2079] Create Queue# [2:195:2088] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.483264Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:183:2079] Create Queue# [2:196:2089] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.483388Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:183:2079] Create Queue# [2:197:2090] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:42.483413Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:42.483526Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:184:2080] 
2026-02-19T08:13:42.483556Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:184:2080] 2026-02-19T08:13:42.483602Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:42.483651Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:42.484188Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:42.484277Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:42.487081Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } PDisks { NodeID: 4 PDiskID: 1 Path: "SectorMap:3:3200" PDiskGuid: 4 } PDisks { NodeID: 5 PDiskID: 1 Path: "SectorMap:4:3200" PDiskGuid: 5 } PDisks { NodeID: 6 PDiskID: 1 Path: "SectorMap:5:3200" PDiskGuid: 6 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:42.487550Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:42.487601Z node 3 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:2:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:42.487647Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:42.487857Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:42.488701Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:207:2080] ControllerId# 72057594037932033 2026-02-19T08:13:42.488734Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:42.488795Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:42.488918Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:42.497264Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:42.497340Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# ... 
te node 78 [73:2103:2493] 2026-02-19T08:15:42.672813Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037892] remote node connected [73:2103:2493] 2026-02-19T08:15:42.672845Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72075186224037892]::SendEvent [73:2103:2493] 2026-02-19T08:15:42.673140Z node 78 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037892] Accept Connect Originator# [73:2103:2493] 2026-02-19T08:15:42.673510Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037892] connected with status OK role: Leader [73:2103:2493] 2026-02-19T08:15:42.673542Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037892] send queued [73:2103:2493] 2026-02-19T08:15:42.674601Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037893] ::Bootstrap [73:2107:2495] 2026-02-19T08:15:42.674646Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037893] lookup [73:2107:2495] 2026-02-19T08:15:42.674706Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal leader: [78:1314:2100] followers: 0 ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:42.674749Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 73 selfDC: 1 leaderDC: 3 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 72075186224037893 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [78:1314:2100] 2026-02-19T08:15:42.674836Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037893] forward result remote node 78 [73:2107:2495] 2026-02-19T08:15:42.674946Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037893] remote node connected [73:2107:2495] 2026-02-19T08:15:42.674989Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72075186224037893]::SendEvent [73:2107:2495] 2026-02-19T08:15:42.675191Z node 78 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037893] Accept Connect Originator# [73:2107:2495] 2026-02-19T08:15:42.675505Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037893] connected with status OK role: Leader [73:2107:2495] 2026-02-19T08:15:42.675538Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037893] send queued [73:2107:2495] 2026-02-19T08:15:42.676351Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037894] ::Bootstrap [73:2110:2497] 2026-02-19T08:15:42.676390Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037894] lookup [73:2110:2497] 2026-02-19T08:15:42.676453Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal leader: [77:1318:2141] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:42.676493Z node 73 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 73 selfDC: 1 leaderDC: 3 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72075186224037894 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:15:42.676678Z node 73 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 
2026-02-19T08:15:42.676819Z node 73 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2026-02-19T08:15:42.676884Z node 73 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2026-02-19T08:15:42.676926Z node 73 :STATESTORAGE DEBUG: statestorage_replica.cpp:235: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2026-02-19T08:15:42.676978Z node 73 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [78:1976:2268] CurrentLeaderTablet: [78:1981:2271] CurrentGeneration: 3 CurrentStep: 0} 2026-02-19T08:15:42.677053Z node 73 :STATESTORAGE DEBUG: statestorage_proxy.cpp:485: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [78:1976:2268] CurrentLeaderTablet: [78:1981:2271] CurrentGeneration: 3 CurrentStep: 0} 2026-02-19T08:15:42.677104Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:831: ApplyEntry tabletId: 72075186224037894 leader: [78:1976:2268] followers: 0 2026-02-19T08:15:42.677147Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 73 selfDC: 1 leaderDC: 3 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 72075186224037894 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [78:1976:2268] 2026-02-19T08:15:42.677201Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037894] forward result remote node 78 [73:2110:2497] 2026-02-19T08:15:42.677327Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037894] remote node connected [73:2110:2497] 2026-02-19T08:15:42.677369Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72075186224037894]::SendEvent [73:2110:2497] 2026-02-19T08:15:42.677692Z node 78 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037894] Accept Connect Originator# [73:2110:2497] 2026-02-19T08:15:42.679360Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037894] connected with status OK role: Leader [73:2110:2497] 2026-02-19T08:15:42.679406Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037894] send queued [73:2110:2497] 2026-02-19T08:15:42.680227Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037895] ::Bootstrap [73:2114:2499] 2026-02-19T08:15:42.680255Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037895] lookup [73:2114:2499] 2026-02-19T08:15:42.680300Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal leader: [78:1822:2195] followers: 0 ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:42.680334Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 73 selfDC: 1 leaderDC: 3 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 72075186224037895 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [78:1822:2195] 2026-02-19T08:15:42.680377Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037895] forward result remote node 78 [73:2114:2499] 2026-02-19T08:15:42.680445Z node 73 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:227: TClient[72075186224037895] remote node connected [73:2114:2499] 2026-02-19T08:15:42.680480Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72075186224037895]::SendEvent [73:2114:2499] 2026-02-19T08:15:42.680659Z node 78 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037895] Accept Connect Originator# [73:2114:2499] 2026-02-19T08:15:42.680982Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037895] connected with status OK role: Leader [73:2114:2499] 2026-02-19T08:15:42.681021Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037895] send queued [73:2114:2499] 2026-02-19T08:15:42.681993Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037896] ::Bootstrap [73:2117:2501] 2026-02-19T08:15:42.682025Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037896] lookup [73:2117:2501] 2026-02-19T08:15:42.682071Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal leader: [78:1825:2197] followers: 0 ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:42.682102Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 73 selfDC: 1 leaderDC: 3 resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 1 disallowed: 0 tabletId: 72075186224037896 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [78:1825:2197] 2026-02-19T08:15:42.682157Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037896] forward result remote node 78 [73:2117:2501] 2026-02-19T08:15:42.682214Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037896] remote node connected [73:2117:2501] 2026-02-19T08:15:42.682243Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72075186224037896]::SendEvent [73:2117:2501] 2026-02-19T08:15:42.682372Z node 78 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037896] Accept Connect Originator# [73:2117:2501] 2026-02-19T08:15:42.682620Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037896] connected with status OK role: Leader [73:2117:2501] 2026-02-19T08:15:42.682649Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037896] send queued [73:2117:2501] 2026-02-19T08:15:42.683416Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [73:2119:2502] 2026-02-19T08:15:42.683487Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [73:2119:2502] 2026-02-19T08:15:42.683607Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [73:616:2179] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:15:42.683695Z node 73 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 73 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 1 localDc: 1 other: 0 disallowed: 0 tabletId: 72057594037927937 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [73:616:2179] 2026-02-19T08:15:42.683869Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [73:2119:2502] 2026-02-19T08:15:42.684003Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [73:2119:2502] 
2026-02-19T08:15:42.684112Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [73:2119:2502] 2026-02-19T08:15:42.684193Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594037927937]::SendEvent [73:2119:2502] 2026-02-19T08:15:42.684371Z node 73 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [73:2119:2502] 2026-02-19T08:15:42.684544Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [73:2119:2502] 2026-02-19T08:15:42.684600Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [73:2119:2502] 2026-02-19T08:15:42.684653Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037927937] push event to server [73:2119:2502] 2026-02-19T08:15:42.684742Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [73:2119:2502] 2026-02-19T08:15:42.684796Z node 73 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [73:2119:2502] 2026-02-19T08:15:42.684899Z node 73 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [73:587:2174] EventType# 268697616 >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestSmartAvailabilityModeFeatureFlagDisabled >> TCmsTest::ManagePermissions >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TMaintenanceApiTest::LastRefreshTime [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2026-02-19T08:11:22.866566Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608485957073623768:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:22.871484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:23.214099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:23.249497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:23.261361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:23.364604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:23.382197Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:23.385962Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608485957073623722:2081] 1771488682864305 != 1771488682864308 2026-02-19T08:11:23.526035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:23.667692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:23.667716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:23.667729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:23.667821Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:23.753429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:23.772873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:23.810031Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7608485961368591859:2300] 2026-02-19T08:11:23.810315Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:11:23.838051Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:11:23.838138Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:11:23.839625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:11:23.839670Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:11:23.839707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:11:23.840031Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:11:23.840077Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:11:23.840112Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:7608485961368591874:2300] in generation 1 2026-02-19T08:11:23.858191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:11:23.914534Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:11:23.914677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:11:23.914738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:7608485961368591876:2301] 2026-02-19T08:11:23.914747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:23.914761Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, 
state: WaitScheme 2026-02-19T08:11:23.914771Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:23.914910Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:23.916559Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:11:23.916658Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:11:23.916696Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:7608485961368591857:2499], serverId# [1:7608485961368591861:2500], sessionId# [0:0:0] 2026-02-19T08:11:23.916778Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:23.916790Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:23.916815Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:11:23.916832Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:23.916850Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:11:23.917117Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:11:23.917174Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-02-19T08:11:23.919060Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:23.919818Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:11:23.919884Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:11:23.924873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:7608485961368591899:2518], serverId# [1:7608485961368591900:2519], sessionId# [0:0:0] 2026-02-19T08:11:23.945604Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1771488683965 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488683965 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:11:23.945664Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:23.946333Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:11:23.946428Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 
2026-02-19T08:11:23.946444Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:11:23.946466Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1771488683965:281474976710657] in PlanQueue unit at 72075186224037888 2026-02-19T08:11:23.946835Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1771488683965:281474976710657 keys extracted: 0 2026-02-19T08:11:23.946994Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:11:23.947104Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:11:23.947155Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:11:23.965196Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:11:23.965892Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:11:23.967244Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1771488683964 2026-02-19T08:11:23.967260Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:11:23.967287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1771488684000 2026-02-19T08:11:23.967334Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1771488683965} 2026-02-19T08:11:23.967372Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:23.967404Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:11:23.967431Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:11:23.967469Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:11:23.967508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1771488683965 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7608485961368591370:2144], exec latency: 18 ms, propose latency: 20 ms 2026-02-19T08:11:23.967549Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-02-19T08:11:23.967590Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-1 ... 
artition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:44.544323Z node 30 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:44.544362Z node 30 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:44.565741Z node 30 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:44.565815Z node 30 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:44.565857Z node 30 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:44.565902Z node 30 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:15:44.565939Z node 30 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:44.566788Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3816: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2026-02-19T08:15:44.566905Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:15:44.567069Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:42: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2026-02-19T08:15:44.567348Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:15:44.578679Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:78: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2026-02-19T08:15:44.578778Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 } 2026-02-19T08:15:44.578845Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:15:44.578905Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3848: Waiting for PlanStep# 12000 from mediator time cast 2026-02-19T08:15:44.579032Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037888:1][30:909:2780] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2026-02-19T08:15:44.579187Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:628: [CdcChangeSenderMain][72075186224037888:1][30:1165:2955] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2026-02-19T08:15:44.579438Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 
2026-02-19T08:15:44.579666Z node 30 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 1 change records: to# [30:1165:2955], at tablet# 72075186224037888 2026-02-19T08:15:44.579729Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2026-02-19T08:15:44.579863Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:633: [CdcChangeSenderMain][72075186224037888:1][30:1165:2955] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-02-19T08:15:44.580016Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:111: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1248:2955] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-02-19T08:15:44.580289Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'streamImpl' requestId: 2026-02-19T08:15:44.580345Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-02-19T08:15:44.580476Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'streamImpl' partition: 0 messageNo: 4 requestId: cookie: 3 2026-02-19T08:15:44.580635Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'streamImpl' requestId: 2026-02-19T08:15:44.580678Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-02-19T08:15:44.580746Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2077: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 5 partNo : 0 messageNo: 5 size 26 offset: -1 2026-02-19T08:15:44.580823Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-02-19T08:15:44.580888Z node 30 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:15:44.580930Z node 30 :PERSQUEUE DEBUG: partition.cpp:2452: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-02-19T08:15:44.580982Z node 30 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:15:44.581018Z node 30 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:15:44.581061Z node 30 :PERSQUEUE DEBUG: partition.cpp:2516: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-02-19T08:15:44.581192Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2026-02-19T08:15:44.581253Z node 30 :PERSQUEUE DEBUG: 
partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:15:44.581288Z node 30 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:15:44.581329Z node 30 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:15:44.581379Z node 30 :PERSQUEUE INFO: partition_write.cpp:1746: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2026-02-19T08:15:44.581478Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2026-02-19T08:15:44.582515Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 4 PartNo 0 PackedSize 107 count 1 nextOffset 5 batches 1 2026-02-19T08:15:44.583033Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 4,1 HeadOffset 4 endOffset 4 curOffset 5 d0000000000_00000000000000000004_00000_0000000001_00000? size 93 WTime 8950 2026-02-19T08:15:44.583346Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:15:44.583496Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 0 count 1 size 93 2026-02-19T08:15:44.584628Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 1 size 93 actorID [30:1129:2908] 2026-02-19T08:15:44.584719Z node 30 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:15:44.584836Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' size 93 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-02-19T08:15:44.585762Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'streamImpl' requestId: 2026-02-19T08:15:44.585853Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-02-19T08:15:44.586051Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2026-02-19T08:15:44.587066Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 2 blobs, size 452 count 4 last offset 3, current partition end offset: 4 2026-02-19T08:15:44.587175Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-02-19T08:15:44.587349Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. 
Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 359 accessed 1 times before, last time 1970-01-01T00:00:07.000000Z 2026-02-19T08:15:44.587431Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-02-19T08:15:44.587598Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 2 blobs are from cache. 2026-02-19T08:15:44.587766Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-02-19T08:15:44.588135Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 339 from pos 0 cbcount 3 2026-02-19T08:15:44.588334Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-02-19T08:15:44.589114Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2026-02-19T08:15:44.589206Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:15:44.589354Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::SysTabletsNode >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::CollectInfo >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved [GOOD] >> TCmsTest::ManualRequestApprovalAlreadyLockedNode >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TCmsTest::TestSmartAvailabilityModeFeatureFlagDisabled [GOOD] >> TCmsTest::TestSmartAvailabilityModeFallsBackToKeepAvailable |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TCmsTest::SysTabletsNode [GOOD] >> KqpPrefixedVectorIndexes::CosineDistanceWithPkSuffixWithOverlap-Covered [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> 
TCmsTest::SysTabletsNode [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TCmsTest::ManualRequestApprovalAlreadyLockedNode [GOOD] >> TCmsTest::Mirror3dcPermissions >> TCmsTest::RequestReplaceDevicePDisk >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::TestSmartAvailabilityModeFallsBackToKeepAvailable [GOOD] >> TCmsTest::TestSmartAvailabilityModeStaysMaxAvailability >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequestsWrong >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TMaintenanceApiTest::ForceAvailabilityMode >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::CosineDistanceWithPkSuffixWithOverlap-Covered [GOOD] Test command err: 2026-02-19T08:11:54.156843Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486095727052813:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:54.156905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:54.208961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:11:54.561974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:54.562080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:54.630241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:54.655618Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:54.687700Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:54.688662Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486095727052783:2081] 1771488714153782 != 1771488714153785 2026-02-19T08:11:54.816709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:54.816743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:54.816751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:54.816870Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:11:54.929121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:55.173402Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:55.361218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:55.432363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:55.616492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:55.839036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:55.932407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:57.899466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486108611956561:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:57.899576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:57.901964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486108611956571:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:57.902064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:58.225029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.280605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.320153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.388625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.423660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.462226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.507141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.559843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:58.651419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486112906924739:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:58.651507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:58.652304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486112906924744:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:58.652375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486112906924745:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:58.652510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:11:58.656985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:11:58.670634Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486112906924748:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:11:58.767295Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486112906924799:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:11:59.157027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486095727052813:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:59.157103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:00.614296Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:760848609572705 ... StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.188778Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:15:49.188835Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [3:7608487101734487482:7633], Recipient [3:7608486706597487194:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.188850Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.188858Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:15:49.189796Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 268698118, Sender [3:7608486706597487183:2152], Recipient [3:7608486706597487194:2156]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 45 2026-02-19T08:15:49.189825Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvHive::TEvDeleteTabletReply 2026-02-19T08:15:49.189877Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 45, at schemeshard: 72057594046644480 2026-02-19T08:15:49.190115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 58] was 1 2026-02-19T08:15:49.190337Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 268698118, Sender [3:7608486706597487183:2152], Recipient [3:7608486706597487194:2156]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 44 2026-02-19T08:15:49.190360Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvHive::TEvDeleteTabletReply 2026-02-19T08:15:49.190397Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 44, at schemeshard: 72057594046644480 2026-02-19T08:15:49.190570Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 57] was 1 2026-02-19T08:15:49.190730Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 268698118, Sender [3:7608486706597487183:2152], Recipient [3:7608486706597487194:2156]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 43 2026-02-19T08:15:49.190790Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvHive::TEvDeleteTabletReply 2026-02-19T08:15:49.190842Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 43, at schemeshard: 72057594046644480 2026-02-19T08:15:49.190992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 56] was 1 2026-02-19T08:15:49.191148Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435084, Sender [3:7608486706597487194:2156], Recipient [3:7608486706597487194:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:15:49.191174Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5494: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:15:49.191201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 3 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:15:49.191218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 58], at schemeshard: 72057594046644480 2026-02-19T08:15:49.191260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 55] was 3 2026-02-19T08:15:49.191284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 57], at schemeshard: 72057594046644480 2026-02-19T08:15:49.191303Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 55] was 2 2026-02-19T08:15:49.191316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 56], at schemeshard: 72057594046644480 2026-02-19T08:15:49.191353Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 55] was 1 2026-02-19T08:15:49.191378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 55], at schemeshard: 72057594046644480 2026-02-19T08:15:49.191400Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 54] was 5 2026-02-19T08:15:49.194632Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender 
[3:7608486740957228602:4169], Recipient [3:7608486706597487194:2156]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037930 ClientId: [3:7608486740957228602:4169] ServerId: [3:7608486740957228620:4178] } 2026-02-19T08:15:49.194662Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:15:49.194674Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037930, from:72057594046644480 is reset 2026-02-19T08:15:49.195974Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [3:7608486740957228636:4188], Recipient [3:7608486706597487194:2156]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037933 ClientId: [3:7608486740957228636:4188] ServerId: [3:7608486740957228647:4196] } 2026-02-19T08:15:49.196000Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:15:49.196009Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037933, from:72057594046644480 is reset 2026-02-19T08:15:49.196100Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [3:7608486740957228634:4186], Recipient [3:7608486706597487194:2156]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037931 ClientId: [3:7608486740957228634:4186] ServerId: [3:7608486740957228642:4191] } 2026-02-19T08:15:49.196112Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:15:49.196120Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72075186224037931, from:72057594046644480 is reset 2026-02-19T08:15:49.196173Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:45 2026-02-19T08:15:49.196191Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:45 tabletId 72075186224037930 2026-02-19T08:15:49.196291Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037930 not found 2026-02-19T08:15:49.196320Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037933 not found 2026-02-19T08:15:49.196343Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037931 not found 2026-02-19T08:15:49.197292Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:44 2026-02-19T08:15:49.197319Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:44 tabletId 72075186224037933 2026-02-19T08:15:49.197352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:43 2026-02-19T08:15:49.197367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:43 tabletId 72075186224037931 2026-02-19T08:15:49.197397Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 4 
paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:15:49.197447Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [3:7608486783906903627:5573], Recipient [3:7608486706597487194:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.197471Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.197481Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:15:49.197512Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [3:7608486783906903635:5577], Recipient [3:7608486706597487194:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.197523Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.197531Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:15:49.197561Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [3:7608486783906903629:5574], Recipient [3:7608486706597487194:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.197572Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:15:49.197580Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046644480 2026-02-19T08:15:49.197749Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [3:7608487101734487487:7638], Recipient [3:7608486706597487194:2156]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [3:7608487101734487487:7638] ServerId: [3:7608487101734487488:7639] } 2026-02-19T08:15:49.197770Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:15:49.197778Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489510.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489510.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488310.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-02-19T08:15:12.736847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:15:12.767184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:15:12.767466Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:15:12.775249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:15:12.775492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:15:12.775726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:15:12.775860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:15:12.775963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:15:12.776110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:15:12.776221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:15:12.776339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:15:12.776467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:15:12.776580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:15:12.776677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:15:12.776783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:15:12.776885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:15:12.807098Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:15:12.807281Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:15:12.807342Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:15:12.807516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:12.807660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:15:12.807729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:15:12.807771Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:15:12.807862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:15:12.807920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:15:12.807961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:15:12.807995Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:15:12.808146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:12.808231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:15:12.808280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:15:12.808338Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:15:12.808429Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:15:12.808499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:15:12.808548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:15:12.808578Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:15:12.808627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:15:12.808668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:15:12.808698Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:15:12.808747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:15:12.808784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:15:12.808816Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:15:12.809060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:15:12.809130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:15:12.809169Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:15:12.809294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:15:12.809336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:15:12.809366Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:15:12.809419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:15:12.809459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:15:12.809490Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:15:12.809531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... granule/portions;fline=constructor_portion.cpp:44;memory_size=286;data_size=260;sum=5688;count=20;size_of_portion=192; 2026-02-19T08:15:51.575621Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=2817; 2026-02-19T08:15:51.575712Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-02-19T08:15:51.576336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=545; 2026-02-19T08:15:51.576409Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3699; 2026-02-19T08:15:51.576464Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3829; 2026-02-19T08:15:51.576534Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-02-19T08:15:51.576615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=31; 2026-02-19T08:15:51.576658Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4296; 2026-02-19T08:15:51.576790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=84; 2026-02-19T08:15:51.576904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=66; 2026-02-19T08:15:51.577022Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=78; 2026-02-19T08:15:51.577103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=54; 2026-02-19T08:15:51.578968Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1825; 2026-02-19T08:15:51.580513Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1475; 2026-02-19T08:15:51.580586Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-02-19T08:15:51.580640Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2026-02-19T08:15:51.580673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-02-19T08:15:51.580723Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=28; 2026-02-19T08:15:51.580752Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-02-19T08:15:51.580833Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=50; 2026-02-19T08:15:51.580862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2026-02-19T08:15:51.580903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2026-02-19T08:15:51.580963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=35; 2026-02-19T08:15:51.581136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=134; 2026-02-19T08:15:51.581168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15529; 2026-02-19T08:15:51.581274Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:15:51.581394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:15:51.581449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:15:51.581512Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:15:51.589395Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=1; 2026-02-19T08:15:51.589617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:15:51.589734Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=0; 2026-02-19T08:15:51.589798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:15:51.589839Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:15:51.589868Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:15:51.589947Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:15:51.590128Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.015000s; 2026-02-19T08:15:51.591993Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:15:51.592293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:15:51.592367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:15:51.592472Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=0; 2026-02-19T08:15:51.592537Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:15:51.592600Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:15:51.592646Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:15:51.592738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:15:51.593184Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.176000s; 2026-02-19T08:15:51.593229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 |91.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |91.0%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::BridgeModeStateStorage |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesRejectSecond >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTest::TestSmartAvailabilityModeStaysMaxAvailability [GOOD] >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] >> TCmsTest::WalleTasks >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> TCmsTest::BridgeModeCollectInfo >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> 
TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::VDisksEviction >> StatisticsSaveLoad::Simple [GOOD] >> TCmsTest::RequestReplaceDevices >> TCmsTest::TestOutdatedState |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestSmartAvailabilityModeStaysMaxAvailability [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] >> StatisticsSaveLoad::Delete [GOOD] >> TCmsTenatsTest::TestClusterLimit >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:15:43.451499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:43.680910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:15:43.790275Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:15:43.790857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:43.791019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:15:44.894009Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:45.067593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:45.067811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:45.129336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:45.238786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:15:46.431360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:46.431490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:46.431529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:46.437236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:46.437811Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:46.510650Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:47.118990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:15:50.421668Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:50.428682Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:15:50.433594Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:15:50.469396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:15:50.469527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:50.514499Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:50.518042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:50.708956Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:50.709082Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:50.710824Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.711489Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.712254Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.713088Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.713470Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.713675Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.713859Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.714072Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.714207Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.730991Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:50.931557Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:50.967790Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:15:50.967915Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:15:50.999260Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:15:51.000662Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:15:51.000897Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:15:51.001008Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:15:51.001078Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:15:51.001171Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:15:51.001235Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:15:51.001314Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:15:51.001955Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:15:51.004105Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1923:2460] 2026-02-19T08:15:51.008517Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:15:51.014607Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Describe result: PathErrorUnknown 2026-02-19T08:15:51.014691Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Creating table 2026-02-19T08:15:51.014805Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:15:51.032066Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:15:51.032175Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2157:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:15:51.033411Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2158:2602], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:15:51.094338Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2655] 2026-02-19T08:15:51.094760Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2231:2655], schemeshard id = 72075186224037897 2026-02-19T08:15:51.101974Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:2139:2590] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-02-19T08:15:51.105441Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-02-19T08:15:51.245728Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:15:51.431341Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:15:51.557671Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:15:51.559847Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2567:2880], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:15:51.563180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:51.566177Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:15:51.566266Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Subscribe on create table tx: 281474976720658 2026-02-19T08:15:51.577152Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Subscribe on tx: 281474976720658 registered 2026-02-19T08:15:51.778026Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Request: create. Transaction completed: 281474976720658. Doublechecking... 2026-02-19T08:15:51.913502Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:15:51.913607Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Column diff is empty, finishing 2026-02-19T08:15:52.501528Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:52.504134Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2758:3377] Owner: [1:2757:3376]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:15:52.504218Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2758:3377] Owner: [1:2757:3376]. Column diff is empty, finishing 2026-02-19T08:15:52.505907Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2767:3380], ActorId: [1:2768:3381], Starting query actor #1 [1:2769:3382] 2026-02-19T08:15:52.505995Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2768:3381], ActorId: [1:2769:3382], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:15:52.541473Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2768:3381], ActorId: [1:2769:3382], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=NTVhMzNkMjQtNmUzODg1ZTUtMWNiNjAyMmEtNjY5M2Y2ODQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:15:53.235070Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2789:3396]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:15:53.235392Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:15:53.237610Z node 1 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [1:2791:3398] 2026-02-19T08:15:53.237815Z node 1 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [1:2791:3398] 2026-02-19T08:15:53.238667Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2792:2999] 2026-02-19T08:15:53.238982Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:2791:3398], server id = [2:2792:2999], tablet id = 72075186224037894, status = OK 2026-02-19T08:15:53.239210Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:2792:2999], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:15:53.242264Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:15:53.243403Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:15:53.243557Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [1:2789:3396], StatRequests.size() = 1 2026-02-19T08:15:53.244052Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:15:53.731302Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2768:3381], ActorId: [1:2769:3382], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NTVhMzNkMjQtNmUzODg1ZTUtMWNiNjAyMmEtNjY5M2Y2ODQ=, TxId: 2026-02-19T08:15:53.731411Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2768:3381], ActorId: [1:2769:3382], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NTVhMzNkMjQtNmUzODg1ZTUtMWNiNjAyMmEtNjY5M2Y2ODQ=, TxId: 2026-02-19T08:15:53.731890Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2767:3380], ActorId: [1:2768:3381], Got response [1:2769:3382] SUCCESS 2026-02-19T08:15:53.732848Z node 1 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 123 ], PathId[ [OwnerId: 1, LocalPathId: 1] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:15:53.746331Z node 1 :STATISTICS DEBUG: database.cpp:224: 
[DispatchLoadStatisticsQuery] QueryId[ 345 ], PathId[ [OwnerId: 1, LocalPathId: 1] ], StatType[ 2 ], ColumnTag[ 2 ] >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> TCmsTest::RequestReplaceBrokenDevices >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:15:43.472182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:43.681046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:15:43.790278Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:15:43.790916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:43.791088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:15:44.904064Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:45.067572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:45.067748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:45.128565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:45.251337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:15:46.435067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:46.435146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:46.435185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:46.438063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:46.438569Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:46.500213Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:47.118990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:15:50.335811Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:50.342559Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:15:50.347488Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:15:50.387440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:15:50.387548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:50.430327Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:50.432621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:50.628858Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:50.628989Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:50.632250Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.635343Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.636008Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.636669Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.636986Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.637193Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.637335Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.637463Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.637568Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.653182Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:50.846728Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:50.879567Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:15:50.879680Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:15:50.913881Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:15:50.916674Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:15:50.916940Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:15:50.917004Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:15:50.917059Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:15:50.917108Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:15:50.917152Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:15:50.917201Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:15:50.917674Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:15:50.919544Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1923:2460] 2026-02-19T08:15:50.923158Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:15:50.930286Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Describe result: PathErrorUnknown 2026-02-19T08:15:50.930345Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Creating table 2026-02-19T08:15:50.930419Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:15:50.954895Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:15:50.955054Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2157:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:15:50.956777Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2158:2602], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:15:50.989214Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2655] 2026-02-19T08:15:50.990391Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2231:2655], schemeshard id = 72075186224037897 2026-02-19T08:15:51.006776Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:2139:2590] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-02-19T08:15:51.012273Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-02-19T08:15:51.155763Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:15:51.284273Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:15:51.478697Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:15:51.480933Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2573:2883], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:15:51.484756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:51.488701Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:15:51.488809Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Subscribe on create table tx: 281474976720658 2026-02-19T08:15:51.503949Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Subscribe on tx: 281474976720658 registered 2026-02-19T08:15:51.758145Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Request: create. Transaction completed: 281474976720658. Doublechecking... 2026-02-19T08:15:51.915388Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:15:51.915525Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2127:2585] Owner: [2:2126:2584]. Column diff is empty, finishing 2026-02-19T08:15:52.612338Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:52.616192Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2766:3379] Owner: [1:2765:3378]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:15:52.616282Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2766:3379] Owner: [1:2765:3378]. Column diff is empty, finishing 2026-02-19T08:15:52.616771Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2775:3382], ActorId: [1:2776:3383], Starting query actor #1 [1:2777:3384] 2026-02-19T08:15:52.616858Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2776:3383], ActorId: [1:2777:3384], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:15:52.642553Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2776:3383], ActorId: [1:2777:3384], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=YjZkMTM3MmEtNzIzOWFjYzQtYmZhNzRlMGEtMTUyOGNjZWQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:15:53.234985Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2797:3398]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:15:53.235387Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:15:53.237641Z node 1 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [1:2799:3400] 2026-02-19T08:15:53.237803Z node 1 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [1:2799:3400] 2026-02-19T08:15:53.238888Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2800:3005] 2026-02-19T08:15:53.239232Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:2799:3400], server id = [2:2800:3005], tablet id = 72075186224037894, status = OK 2026-02-19T08:15:53.239465Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:2800:3005], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:15:53.242476Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:15:53.245423Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:15:53.245540Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [1:2797:3398], StatRequests.size() = 1 2026-02-19T08:15:53.591647Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:15:53.733112Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2776:3383], ActorId: [1:2777:3384], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YjZkMTM3MmEtNzIzOWFjYzQtYmZhNzRlMGEtMTUyOGNjZWQ=, TxId: 2026-02-19T08:15:53.733218Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2776:3383], ActorId: [1:2777:3384], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YjZkMTM3MmEtNzIzOWFjYzQtYmZhNzRlMGEtMTUyOGNjZWQ=, TxId: 2026-02-19T08:15:53.733483Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2775:3382], ActorId: [1:2776:3383], Got response [1:2777:3384] SUCCESS 2026-02-19T08:15:53.734747Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2816:3405], ActorId: [1:2817:3406], Starting query actor #1 [1:2818:3407] 2026-02-19T08:15:53.734807Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: 
[TQueryBase] OwnerId: [1:2817:3406], ActorId: [1:2818:3407], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:15:53.737476Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2817:3406], ActorId: [1:2818:3407], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=OTkyOTljMC00ZmY5Y2ZkNS00MDI5ODFhZi0zODFkMmMxOA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2026-02-19T08:15:53.768129Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2827:3416]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:15:53.768285Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:15:53.768329Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [1:2827:3416], StatRequests.size() = 1 2026-02-19T08:15:54.024988Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2817:3406], ActorId: [1:2818:3407], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=OTkyOTljMC00ZmY5Y2ZkNS00MDI5ODFhZi0zODFkMmMxOA==, TxId: 2026-02-19T08:15:54.025073Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2817:3406], ActorId: [1:2818:3407], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=OTkyOTljMC00ZmY5Y2ZkNS00MDI5ODFhZi0zODFkMmMxOA==, TxId: 2026-02-19T08:15:54.025407Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2816:3405], ActorId: [1:2817:3406], Got response [1:2818:3407] SUCCESS 2026-02-19T08:15:54.026391Z node 1 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 123 ], PathId[ [OwnerId: 1, LocalPathId: 1] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:15:54.029562Z node 1 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 123 ], RowsCount[ 0 ] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::ManageRequests >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> StatisticsSaveLoad::ForbidAccess [GOOD] >> TCmsTest::WalleRebootDownNode >> TCmsTest::Mirror3dcPermissions [GOOD] >> TCmsTest::VDisksEviction [GOOD] >> TestMalformedRequest::CompressedDeflateContentLengthNone >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> TCmsTest::RequestReplaceDevices [GOOD] >> 
TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] Test command err: 2026-02-19T08:15:42.076097Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2026-02-19T08:15:42.076833Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2026-02-19T08:15:42.076863Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected 2026-02-19T08:15:42.441205Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2026-02-19T08:15:42.442035Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2026-02-19T08:15:42.442069Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected 2026-02-19T08:15:44.755121Z node 9 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 9, reason# Disconnected |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2026-02-19T08:15:55.404635Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-02-19T08:15:55.404742Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-02-19T08:15:55.404888Z node 26 :CMS DEBUG: cluster_info.cpp:1015: Timestamp: 1970-01-01T00:02:00Z 2026-02-19T08:15:55.407180Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: 
UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-02-19T08:15:55.408004Z node 26 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 
120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-02-19T08:15:55.408387Z node 26 :CMS INFO: cms.cpp:365: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-02-19T08:15:55.408474Z node 26 :CMS DEBUG: cms.cpp:407: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2026-02-19T08:15:55.408539Z node 26 :CMS DEBUG: cms.cpp:426: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2026-02-19T08:15:55.408723Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:28: TTxStorePermissions Execute 2026-02-19T08:15:55.408999Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:55.409067Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2026-02-19T08:15:55.409381Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2026-02-19T08:15:55.409439Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:15:55.409538Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2026-02-19T08:15:55.409587Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2026-02-19T08:15:55.409634Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326 ... 
torage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 
120622560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120622560 } 2026-02-19T08:15:55.839755Z node 26 :CMS INFO: cms.cpp:365: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-02-19T08:15:55.839841Z node 26 :CMS DEBUG: cms.cpp:407: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2026-02-19T08:15:55.839892Z node 26 :CMS DEBUG: cms.cpp:426: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2026-02-19T08:15:55.840023Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:28: TTxStorePermissions Execute 2026-02-19T08:15:55.840211Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-4, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:55.840253Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2026-02-19T08:15:55.840476Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.100000s 2026-02-19T08:15:55.840521Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:15:55.840596Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2026-02-19T08:15:55.840649Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2026-02-19T08:15:55.840684Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2026-02-19T08:15:55.840719Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2026-02-19T08:15:55.840754Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2026-02-19T08:15:55.840778Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 
2026-02-19T08:15:55.840809Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2026-02-19T08:15:55.840851Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 33, wbId# [33:8388350642965737326:1634689637] 2026-02-19T08:15:55.841012Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 120524072 ChangeTime: 120524072 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-02-19T08:15:55.841670Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 120524072 ChangeTime: 120524072 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-02-19T08:15:55.841799Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 120524072 ChangeTime: 120524072 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-02-19T08:15:55.841906Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 120524072 ChangeTime: 120524072 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-02-19T08:15:55.841954Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 33, response# PDiskStateInfo { PDiskId: 33 CreateTime: 120524072 ChangeTime: 120524072 Path: "/33/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-02-19T08:15:55.841989Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 120524072 ChangeTime: 120524072 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-02-19T08:15:55.842053Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 120524072 ChangeTime: 120524072 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-02-19T08:15:55.842110Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 120524072 ChangeTime: 120524072 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-02-19T08:15:55.842154Z node 26 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:15:55.859274Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-02-19T08:15:55.859537Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest 
{ User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 26 has not yet been completed" } RequestId: "user-r-4" Deadline: 0 } 2026-02-19T08:15:55.860038Z node 26 :CMS INFO: cms.cpp:1496: User user removes request user-r-4 2026-02-19T08:15:55.860079Z node 26 :CMS DEBUG: cms.cpp:1519: Resulting status: OK 2026-02-19T08:15:55.860137Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:21: TTxRemoveRequest Execute 2026-02-19T08:15:55.860191Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 26 2026-02-19T08:15:55.860343Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-4, reason# explicit remove 2026-02-19T08:15:55.873020Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:45: TTxRemoveRequest Complete 2026-02-19T08:15:55.873251Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-4" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } >> TCmsTest::BridgeModeStateStorage [GOOD] >> TCmsTest::BridgeModeSysTablets >> TUrlMatcherTest::MatchRecursive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:15:43.484551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:43.682363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:15:43.790282Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:15:43.790946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:15:43.791170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:15:44.894040Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:45.069418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:45.069537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:45.128432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:45.165987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:15:46.437572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:46.438274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:46.438343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:46.438377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:46.439008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:46.512487Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:47.117299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:15:50.609012Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:50.616500Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:15:50.619743Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:15:50.653018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:15:50.653158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:50.708238Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:15:50.710919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:50.900845Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:50.900982Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:50.902565Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.903326Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.904075Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.904970Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.905274Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.905420Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.905626Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.905764Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.905923Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:15:50.922125Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:51.116300Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:51.167229Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:15:51.167317Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:15:51.230529Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:15:51.230807Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:15:51.231054Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:15:51.231131Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:15:51.231183Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:15:51.231242Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:15:51.231304Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:15:51.231362Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:15:51.231851Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:15:51.237251Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:15:51.237378Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:15:51.264681Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:15:51.266012Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:15:51.338078Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:15:51.341235Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:15:51.352230Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:15:51.352299Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:15:51.352382Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:15:51.357702Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:15:51.363305Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:51.377502Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:15:51.377645Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:15:51.395079Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:15:51.636212Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:15:51.657052Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:15:52.002600Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:15:52.117374Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:15:52.117479Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:15:52.889427Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:53.232145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2751:3374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.232493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.233111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2769:3379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.233222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.359875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:53.817821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3054:3428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.819430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.952897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3058:3431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.953066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.954516Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3061:3434]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:15:53.954712Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:15:53.954806Z node 1 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [1:3063:3436] 2026-02-19T08:15:53.954893Z node 1 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [1:3063:3436] 2026-02-19T08:15:53.955545Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3064:3185] 2026-02-19T08:15:53.955828Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:3063:3436], server id = [2:3064:3185], tablet id = 72075186224037894, status = OK 2026-02-19T08:15:53.956103Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3064:3185], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:15:53.956221Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:15:53.956490Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:15:53.956574Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [1:3061:3434], StatRequests.size() = 1 2026-02-19T08:15:53.980760Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:15:53.981561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3068:3440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.981753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.982379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3072:3444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.982541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.982654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3075:3447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:53.990161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:54.126973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:15:54.127063Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:15:54.201384Z node 1 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [1:3063:3436], schemeshard count = 1 2026-02-19T08:15:54.534210Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3077:3449], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-02-19T08:15:54.683079Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:3179:3514] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:54.696771Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3202:3530]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:15:54.696947Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:15:54.696998Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [1:3202:3530], StatRequests.size() = 1 2026-02-19T08:15:54.993151Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3280:3560], for# user@builtin, access# DescribeSchema 2026-02-19T08:15:54.993241Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3280:3560], for# user@builtin, access# DescribeSchema 2026-02-19T08:15:55.006488Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:3270:3556], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:15:55.009149Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NmFiYThlNDctY2E5YTVkNmMtMmQxNTExNzUtZGNhMzU0ZjU=, ActorId: [1:3261:3548], ActorState: ExecuteState, LegacyTraceId: 01khtfgjhybdjbdzp8g6z6p87f, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 17 } message: "At function: KiReadTable!" end_position { row: 2 column: 17 } severity: 1 issues { position { row: 2 column: 17 } message: "Cannot find table \'db.[/Root/Database/.metadata/_statistics]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::RacyStartCollecting >> TAuditTest::DeniedPathsAreNotAudited [GOOD] >> TCmsTest::BridgeModeCollectInfo [GOOD] >> TCmsTest::BridgeModeGroups >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchRecursive [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::DeniedPathsAreNotAudited [GOOD] >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ManageRequests [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray >> TestMalformedRequest::InvalidHttpStartLine2 >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TCmsTest::RacyStartCollecting [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::SamePriorityRequest2 >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TestMalformedRequest::InvalidHttpStartLine4 |91.0%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RacyStartCollecting [GOOD] Test command err: 2026-02-19T08:15:59.576610Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 26:26 2026-02-19T08:15:59.576667Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 27:27 2026-02-19T08:15:59.576682Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 28:28 2026-02-19T08:15:59.576696Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 29:29 2026-02-19T08:15:59.576709Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 30:30 2026-02-19T08:15:59.576722Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 31:31 2026-02-19T08:15:59.576741Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 32:32 2026-02-19T08:15:59.576760Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 25:25 2026-02-19T08:15:59.582875Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 26:26 2026-02-19T08:15:59.582943Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 27:27 2026-02-19T08:15:59.582969Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 28:28 2026-02-19T08:15:59.583025Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 29:29 2026-02-19T08:15:59.583057Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 30:30 2026-02-19T08:15:59.583080Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 31:31 2026-02-19T08:15:59.583100Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 32:32 2026-02-19T08:15:59.583150Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 25:25 2026-02-19T08:15:59.612289Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 26:26 2026-02-19T08:15:59.612369Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 27:27 2026-02-19T08:15:59.612393Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 28:28 2026-02-19T08:15:59.612411Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 29:29 2026-02-19T08:15:59.612430Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 30:30 2026-02-19T08:15:59.612469Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 31:31 2026-02-19T08:15:59.612493Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 32:32 2026-02-19T08:15:59.612514Z node 25 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 25:25 >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTest::ActionIssue [GOOD] >> TCmsTest::ActionIssuePartialPermissions >> TCmsTest::BridgeModeSysTablets [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks >> TCmsTest::TestProcessingQueue [GOOD] >> TCmsTest::TestLogOperationsRollback >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> TestMalformedRequest::CompressedDeflateContentLengthNone [GOOD] >> TCmsTest::BridgeModeGroups [GOOD] >> TCmsTest::BridgeModeNodeLimit >> TCmsTest::SamePriorityRequest2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:19.782378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:19.782464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:19.782541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:19.782590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:19.782623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:19.782676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:19.782714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:19.782741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:13:19.782809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:19.782886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-02-19T08:13:19.783728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:19.784010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:19.911242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:19.911295Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:19.937245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:19.938021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:19.938226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:19.950607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:19.952890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:19.953651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:19.954046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:19.958201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:19.958393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:19.959649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:19.959713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:19.959837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:19.959908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:19.959959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:19.960116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:20.116551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.117576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.117691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.117959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:20.118794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
cipient [4:723:2707]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-02-19T08:16:00.306919Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3242: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-02-19T08:16:00.307033Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409546 outdated step 5000038 last cleanup 0 2026-02-19T08:16:00.307129Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:16:00.307185Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409546 2026-02-19T08:16:00.307237Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409546 has no attached operations 2026-02-19T08:16:00.307277Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2026-02-19T08:16:00.307471Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [4:723:2707]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-02-19T08:16:00.307779Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3537: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2026-02-19T08:16:00.308852Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [4:723:2707], Recipient [4:133:2156]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 135 Memory: 124456 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 261 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 451 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 4 StartTime: 135 TableOwnerId: 72057594046678944 FollowerId 2026-02-19T08:16:00.308922Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:16:00.308981Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0135 2026-02-19T08:16:00.309110Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:16:00.309164Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-02-19T08:16:00.310651Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435080, Sender [4:1475:3415], Recipient [4:723:2707]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvBuildTableStatsResult 2026-02-19T08:16:00.355001Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:16:00.355090Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:16:00.355131Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-02-19T08:16:00.355216Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 1 2026-02-19T08:16:00.355268Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-02-19T08:16:00.355390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-02-19T08:16:00.355461Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-02-19T08:16:00.355496Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-02-19T08:16:00.355583Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:220: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2026-02-19T08:16:00.355660Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:607: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-02-19T08:16:00.355758Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:00.366312Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:16:00.366401Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:16:00.366436Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:16:00.630205Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [4:133:2156]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:00.630281Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:00.630375Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [4:133:2156], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:00.630409Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:00.988910Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:00.988992Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:00.989095Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [4:133:2156], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:00.989142Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:01.374649Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:01.374719Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:01.374814Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [4:133:2156], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:01.374863Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:01.752290Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:01.752381Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:01.752473Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [4:133:2156], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:01.752506Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:02.135554Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:02.135635Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:02.135735Z node 4 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [4:133:2156], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:02.135766Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:02.199639Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [4:723:2707]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-02-19T08:16:02.509743Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:02.509841Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:16:02.509938Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [4:133:2156], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:02.509972Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> TestMalformedRequest::CompressedDeflateContentLengthLower >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TCmsTest::DisableMaintenance >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::WalleDisableMaintenance |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> TestMalformedRequest::InvalidHttpStartLine2 [GOOD] >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> 
TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 >> TestMalformedRequest::InvalidHttpStartLine3 >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] |91.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |91.0%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> TestMalformedRequest::InvalidHttpStartLine4 [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] >> TCmsTest::BridgeModeNodeLimit [GOOD] >> TestMalformedRequest::InvalidHttpStartLine5 >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TCmsTest::TestLogOperationsRollback [GOOD] >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TCmsTest::DisableMaintenance [GOOD] >> TCmsTest::WalleDisableMaintenance [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::BridgeModeNodeLimit [GOOD] Test command err: 2026-02-19T08:15:47.875621Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-02-19T08:15:47.875733Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-02-19T08:15:47.875874Z node 9 :CMS DEBUG: cluster_info.cpp:1015: Timestamp: 1970-01-01T00:02:00Z 2026-02-19T08:15:47.878100Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: 
"vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-02-19T08:15:47.878865Z node 9 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" 
State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-02-19T08:15:47.879191Z node 9 :CMS INFO: cms.cpp:365: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-02-19T08:15:47.879273Z node 9 :CMS DEBUG: cms.cpp:407: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2026-02-19T08:15:47.879357Z node 9 :CMS DEBUG: cms.cpp:426: Result: DISALLOW_TEMP (reason: VDisks eviction from host 9 has not yet been completed) 2026-02-19T08:15:47.879516Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:28: TTxStorePermissions Execute 2026-02-19T08:15:47.879761Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:47.879823Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 9, marker# MARKER_DISK_FAULTY 2026-02-19T08:15:47.880084Z node 9 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2026-02-19T08:15:47.880137Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:15:47.880244Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-02-19T08:15:47.880293Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-02-19T08:15:47.880329Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-02-19T08:15:47.880361Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-02-19T08:15:47.880413Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-02-19T08:15:47.880453Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-02-19T08:15:47.880495Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, ... 
sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:15:53.096017Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-02-19T08:15:53.096075Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 9:9, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-02-19T08:15:53.096121Z node 9 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2026-02-19T08:15:53.096369Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-02-19T08:15:53.096608Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-02-19T08:15:53.096754Z node 9 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2026-02-19T08:15:53.096813Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 9:9 2026-02-19T08:15:53.096846Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2026-02-19T08:15:53.109848Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-02-19T08:15:53.109935Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-02-19T08:15:53.125792Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-02-19T08:15:53.125913Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-02-19T08:15:53.125987Z node 9 :CMS DEBUG: cluster_info.cpp:1015: Timestamp: 1970-01-01T00:03:00Z 2026-02-19T08:15:53.126983Z node 9 :CMS INFO: cms.cpp:365: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:53.127107Z node 9 :CMS DEBUG: cms.cpp:407: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } 2026-02-19T08:15:53.127169Z node 9 :CMS DEBUG: node_checkers.cpp:121: [Nodes Counter] Checking Node: 9, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2026-02-19T08:15:53.127227Z node 9 :CMS DEBUG: cms.cpp:771: Ring: 0; State: Ok 2026-02-19T08:15:53.127255Z node 9 :CMS DEBUG: cms.cpp:771: Ring: 1; State: Ok 2026-02-19T08:15:53.127274Z node 9 :CMS DEBUG: cms.cpp:771: Ring: 2; State: Ok 2026-02-19T08:15:53.127304Z node 9 :CMS DEBUG: cms.cpp:415: Result: ALLOW 2026-02-19T08:15:53.127475Z node 9 :CMS DEBUG: cms.cpp:1121: Accepting permission: id# user-p-1, requestId# user-r-2, owner# user, priority# 0 2026-02-19T08:15:53.127547Z node 9 :CMS INFO: cluster_info.cpp:806: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2026-02-19T08:15:53.127653Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:28: TTxStorePermissions Execute 2026-02-19T08:15:53.127869Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.210512Z, action# Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 
2026-02-19T08:15:53.128000Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:53.140273Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-02-19T08:15:53.140571Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } Deadline: 780210512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2026-02-19T08:15:53.140628Z node 9 :CMS DEBUG: cms.cpp:1149: Schedule cleanup at 1970-01-01T00:33:00.210512Z 2026-02-19T08:15:53.178319Z node 9 :CMS INFO: cluster_info.cpp:806: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2026-02-19T08:15:53.178761Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-02-19T08:15:53.178829Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-02-19T08:15:53.178887Z node 9 :CMS DEBUG: cluster_info.cpp:1015: Timestamp: 1970-01-01T00:03:00Z 2026-02-19T08:15:53.179743Z node 9 :CMS INFO: cms.cpp:365: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:53.179833Z node 9 :CMS DEBUG: cms.cpp:407: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2026-02-19T08:15:53.179885Z node 9 :CMS DEBUG: node_checkers.cpp:121: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2026-02-19T08:15:53.179934Z node 9 :CMS DEBUG: cms.cpp:415: Result: ALLOW 2026-02-19T08:15:53.180082Z node 9 :CMS DEBUG: cms.cpp:1121: Accepting permission: id# user-p-2, requestId# user-r-3, owner# user, priority# 0 2026-02-19T08:15:53.180145Z node 9 :CMS INFO: cluster_info.cpp:806: Adding lock for Host ::1:12002 (10) (permission user-p-2 until 1970-01-01T00:13:00Z) 2026-02-19T08:15:53.180222Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:28: TTxStorePermissions Execute 2026-02-19T08:15:53.180368Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.312024Z, action# Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 2026-02-19T08:15:53.180463Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:53.192924Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 
2026-02-19T08:15:53.193180Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-3" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-3" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 } Deadline: 780312024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 10 InterconnectPort: 12002 } } } } 2026-02-19T08:15:53.193700Z node 9 :CMS INFO: cms.cpp:1418: User user is done with permissions user-p-1 2026-02-19T08:15:53.193795Z node 9 :CMS DEBUG: cms.cpp:1441: Resulting status: OK 2026-02-19T08:15:53.193877Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2026-02-19T08:15:53.193983Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 9 2026-02-19T08:15:53.194091Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-1 was removed 2026-02-19T08:15:53.194141Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2026-02-19T08:15:53.206490Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2026-02-19T08:15:53.206690Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2026-02-19T08:15:53.207259Z node 9 :CMS INFO: cms.cpp:1418: User user is done with permissions user-p-2 2026-02-19T08:15:53.207323Z node 9 :CMS DEBUG: cms.cpp:1441: Resulting status: OK 2026-02-19T08:15:53.207406Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2026-02-19T08:15:53.207530Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 10 2026-02-19T08:15:53.207612Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# permission user-p-2 was removed 2026-02-19T08:15:53.207656Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2026-02-19T08:15:53.220580Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2026-02-19T08:15:53.220859Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2026-02-19T08:16:04.934764Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 49 2026-02-19T08:16:04.935494Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 52 2026-02-19T08:16:04.935838Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 50 2026-02-19T08:16:04.935953Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 51 2026-02-19T08:16:04.935992Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 57 2026-02-19T08:16:04.936028Z node 49 :CMS ERROR: 
sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 58 2026-02-19T08:16:04.936064Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 59 2026-02-19T08:16:04.936100Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 60 2026-02-19T08:16:04.936132Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 61 2026-02-19T08:16:04.936166Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 62 2026-02-19T08:16:04.936199Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 63 2026-02-19T08:16:04.936235Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 64 2026-02-19T08:16:04.936268Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 53 2026-02-19T08:16:04.936298Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 54 2026-02-19T08:16:04.936333Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 55 2026-02-19T08:16:04.936365Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 56 >> CopyTable::WithUncommittedData >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::DisableMaintenance [GOOD] Test command err: 2026-02-19T08:15:42.637835Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } DisableMaintenance: false } } 2026-02-19T08:15:42.638366Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2026-02-19T08:15:42.668747Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-02-19T08:15:42.668911Z node 2 :CMS DEBUG: cluster_info.cpp:1015: Timestamp: 1970-01-01T00:02:00Z 2026-02-19T08:15:42.670776Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 
120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 2 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 5 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-6-6" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 6 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-7-7" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 7 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP 
Timestamp: 120110512 } Devices { Name: "pdisk-8-8" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 8 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-02-19T08:15:42.671528Z node 2 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 2 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 5 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP 
Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-6-6" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 6 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-7-7" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 7 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-8-8" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 8 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-02-19T08:15:42.671741Z node 2 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.084512s 2026-02-19T08:15:42.671803Z node 2 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:15:42.671938Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-02-19T08:15:42.671983Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-02-19T08:15:42.672012Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-02-19T08:15:42.672054Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-02-19T08:1 ... 
nel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 2 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-2.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-02-19T08:15:52.910480Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-02-19T08:15:52.910763Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 3 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-3.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-02-19T08:15:52.910846Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-02-19T08:15:52.910955Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-02-19T08:15:52.910999Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-02-19T08:15:52.911061Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-02-19T08:15:52.911095Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-02-19T08:15:52.911143Z node 2 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:15:52.911405Z node 2 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 2:2, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2026-02-19T08:15:52.911525Z node 2 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-02-19T08:15:52.911769Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-02-19T08:15:52.912003Z node 2 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-02-19T08:15:52.912051Z node 2 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:2 2026-02-19T08:15:52.925236Z node 2 :CMS DEBUG: 
cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-02-19T08:15:52.952174Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-02-19T08:15:52.952259Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-02-19T08:15:52.952317Z node 2 :CMS DEBUG: cluster_info.cpp:1015: Timestamp: 1970-01-01T00:04:00Z 2026-02-19T08:15:52.953141Z node 2 :CMS INFO: cms.cpp:365: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 2 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:52.953251Z node 2 :CMS DEBUG: cms.cpp:407: Checking action: Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 2 has not yet been completed" } 2026-02-19T08:15:52.953302Z node 2 :CMS DEBUG: cms.cpp:426: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2026-02-19T08:15:52.953414Z node 2 :CMS DEBUG: cms_tx_store_permissions.cpp:28: TTxStorePermissions Execute 2026-02-19T08:15:52.953544Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-02-19T08:15:52.967654Z node 2 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-02-19T08:15:52.967956Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-2" } 2026-02-19T08:15:52.968643Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2026-02-19T08:15:52.981530Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2026-02-19T08:15:52.981810Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } DisableMaintenance: false 2026-02-19T08:15:57.945046Z node 2 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:15:57.945135Z node 2 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:15:57.946036Z node 2 
:CMS DEBUG: cms.cpp:1233: Running CleanupWalleTasks 2026-02-19T08:15:57.946418Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-02-19T08:15:57.946510Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-02-19T08:15:57.946547Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-02-19T08:15:57.946585Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-02-19T08:15:57.946614Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-02-19T08:15:57.946642Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-02-19T08:15:57.946670Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-02-19T08:15:57.946705Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-02-19T08:15:57.947101Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 2 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-2.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-02-19T08:15:57.947688Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-02-19T08:15:57.947831Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-02-19T08:15:57.947982Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-02-19T08:15:57.948096Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-02-19T08:15:57.948165Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-02-19T08:15:57.948229Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { 
PDiskId: 3 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-3.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-02-19T08:15:57.948296Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-02-19T08:15:57.948378Z node 2 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:15:57.948711Z node 2 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 2:2, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-02-19T08:15:57.948796Z node 2 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-02-19T08:15:57.949112Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-02-19T08:15:57.949385Z node 2 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2026-02-19T08:15:57.949440Z node 2 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:2 >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] Test command err: 2026-02-19T08:15:55.881177Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-02-19T08:15:55.991913Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-02-19T08:15:56.007446Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-02-19T08:15:56.128759Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-02-19T08:16:00.722704Z node 17 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 17:17 2026-02-19T08:16:00.722786Z node 17 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 18:18 2026-02-19T08:16:00.722815Z node 17 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 19:19 2026-02-19T08:16:00.722839Z node 17 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 20:20 2026-02-19T08:16:00.722863Z node 17 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 21:21 2026-02-19T08:16:00.722887Z node 17 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 22:22 2026-02-19T08:16:00.722913Z node 17 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 23:23 2026-02-19T08:16:00.722935Z node 17 :CMS ERROR: cluster_info.cpp:512: Cannot update state for unknown PDisk 24:24 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleDisableMaintenance [GOOD] >> TestMalformedRequest::CompressedDeflateContentLengthLower [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 >> TColumnShardTestSchema::HotTiers [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthCorrect >> CopyTable::WithUncommittedData [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:16:08.034087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:16:08.034183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.034281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:16:08.034327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:16:08.034365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:16:08.034403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:16:08.034441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:16:08.034471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:16:08.034596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.034710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:16:08.035616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:16:08.038039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:16:08.177913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:16:08.177977Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:08.194576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:16:08.195562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:16:08.198398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:16:08.242441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:16:08.243439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:16:08.244230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:16:08.244574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:16:08.248556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.248713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:08.268262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:16:08.268351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.269231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:08.269300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:08.269361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:08.290795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:16:08.476037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 
281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.478046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.478131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.478187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.478259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
shard: 72057594046678944 2026-02-19T08:16:10.360827Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2026-02-19T08:16:10.360895Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2026-02-19T08:16:10.361564Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:10.364217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:16:10.364256Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:10.364343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:16:10.364666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:16:10.364701Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:10.364959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:16:10.365754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:16:10.366019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877763, Sender [1:1299:3148], Recipient [1:686:2671]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1299:3148] ServerId: [1:1301:3150] } 2026-02-19T08:16:10.366064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5395: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:16:10.366093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6261: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:16:10.366552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:16:10.366608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:16:10.367189Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:1315:3164], Recipient [1:686:2671]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:10.367248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:10.367287Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046678944 2026-02-19T08:16:10.367419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [1:911:2843], 
Recipient [1:686:2671]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2026-02-19T08:16:10.367449Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:16:10.367519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:16:10.367592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:16:10.367621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1313:3162] 2026-02-19T08:16:10.367755Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [1:1315:3164], Recipient [1:686:2671]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:16:10.367782Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:16:10.367829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2026-02-19T08:16:10.368500Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:1316:3165], Recipient [1:686:2671]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-02-19T08:16:10.368572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:16:10.368710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:16:10.368973Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 265us result status StatusSuccess 2026-02-19T08:16:10.369379Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 
2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:16:10.370069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271188001, Sender [1:1317:3166], Recipient [1:686:2671]: NKikimrPQ.TEvPeriodicTopicStats PathId: 40 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2026-02-19T08:16:10.370111Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5336: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2026-02-19T08:16:10.370169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 40] DataSize 151 UsedReserveSize 151 2026-02-19T08:16:10.370204Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2026-02-19T08:16:10.370562Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:1318:3167], Recipient [1:686:2671]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-02-19T08:16:10.370598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:16:10.370666Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:16:10.372724Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 121us result status StatusSuccess 2026-02-19T08:16:10.373193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } 
YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489509.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489509.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=171489509.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489509.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489509.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=171489509.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488309.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151489509.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=151489509.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488309.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151488309.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=151488309.000000s;Name=;Codec=}; 2026-02-19T08:15:09.793615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:15:09.860577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:15:09.860859Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:15:09.877673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:15:09.882058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:15:09.882375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:15:09.882548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:15:09.882676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:15:09.882784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:15:09.882933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:15:09.883061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:15:09.883187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:15:09.883303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:15:09.883408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:15:09.883511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:15:09.883618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:15:09.938997Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:15:09.939198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:15:09.939261Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:15:09.939444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:09.939583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:15:09.939653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:15:09.939696Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:15:09.939796Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:15:09.939863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:15:09.939904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:15:09.939936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:15:09.940108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:15:09.940163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:15:09.940200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:15:09.940244Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:15:09.940337Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:15:09.940388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:15:09.940433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:15:09.940460Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:15:09.940503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:15:09.940538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:15:09.940568Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:15:09.940644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:15:09.940703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:15:09.940748Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:15:09.940932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:15:09.941001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:15:09.941042Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:15:09.941167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:15:09.941209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:15:09.941 ... 
ataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488958072:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:16:10.071251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488958072:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:16:10.071801Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488958072:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-02-19T08:16:10.073520Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488958072:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-02-19T08:16:10.074549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488958072:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:217;event=TTxScan started;actor_id=[1:1455:3399];trace_detailed=; 2026-02-19T08:16:10.075051Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-02-19T08:16:10.075283Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-02-19T08:16:10.075586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 
2026-02-19T08:16:10.075809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.076253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:16:10.076418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.076559Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.076806Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:1455:3399] finished for tablet 9437184 2026-02-19T08:16:10.077353Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:1454:3398];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":65903623,"name":"_full_task","f":65903623,"d_finished":0,"c":0,"l":65906037,"d":2414},"events":[{"name":"bootstrap","f":65903935,"d_finished":1096,"c":1,"l":65905031,"d":1096},{"a":65905394,"name":"ack","f":65905394,"d_finished":0,"c":0,"l":65906037,"d":643},{"a":65905373,"name":"processing","f":65905373,"d_finished":0,"c":0,"l":65906037,"d":664},{"name":"ProduceResults","f":65904605,"d_finished":715,"c":2,"l":65905749,"d":715},{"a":65905754,"name":"Finish","f":65905754,"d_finished":0,"c":0,"l":65906037,"d":283}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.077463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:1454:3398];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:16:10.077958Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:1454:3398];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":65903623,"name":"_full_task","f":65903623,"d_finished":0,"c":0,"l":65906681,"d":3058},"events":[{"name":"bootstrap","f":65903935,"d_finished":1096,"c":1,"l":65905031,"d":1096},{"a":65905394,"name":"ack","f":65905394,"d_finished":0,"c":0,"l":65906681,"d":1287},{"a":65905373,"name":"processing","f":65905373,"d_finished":0,"c":0,"l":65906681,"d":1308},{"name":"ProduceResults","f":65904605,"d_finished":715,"c":2,"l":65905749,"d":715},{"a":65905754,"name":"Finish","f":65905754,"d_finished":0,"c":0,"l":65906681,"d":927}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; Got TEvKqpCompute::TEvScanData [1:1455:3399]->[1:1454:3398] 2026-02-19T08:16:10.078073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:16:10.073490Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-02-19T08:16:10.078118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:16:10.078253Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest >> CopyTable::WithUncommittedData [GOOD] Test command err: 2026-02-19T08:16:09.599860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:09.634893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:09.635226Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:09.643669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:09.643932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:09.644222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:09.644365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:09.644484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:09.644627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:09.644754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:09.644864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:09.644991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:09.645113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:09.645254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:09.645386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:09.645518Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:09.676407Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:09.676639Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:09.676691Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:09.676894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:09.677094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:09.677178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:09.677226Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:09.677337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:09.677410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:09.677465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:09.677497Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:09.677689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:09.677791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:09.677844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:09.677896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:09.678038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:09.678099Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:09.678141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:09.678173Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:09.678227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:09.678264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:09.678320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:09.678372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:09.678427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:09.678465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:09.678707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:09.678766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:09.678804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:09.678953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:09.679010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:09.679053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:09.679105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:09.679147Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:09.679185Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:09.679230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:09.679271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:16:09.679306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:16:09.679508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:16:09.679579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... o":"10","t":"AssembleOriginalData"},"w":25,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":25,"id":16},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":25,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":25,"id":10},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":25,"id":6},"22":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"resource_type","id":2},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":22},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":25,"id":12}}}; 2026-02-19T08:16:10.567869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1771488970566:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-02-19T08:16:10.569994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1771488970566:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:217;event=TTxScan started;actor_id=[1:340:2352];trace_detailed=; 2026-02-19T08:16:10.570629Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);; 2026-02-19T08:16:10.570884Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-02-19T08:16:10.571361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.571574Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.571822Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:16:10.572041Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; 
ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.572247Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.572490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:340:2352] finished for tablet 9437184 2026-02-19T08:16:10.572956Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:339:2351];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1602197,"name":"_full_task","f":1602197,"d_finished":0,"c":0,"l":1604844,"d":2647},"events":[{"name":"bootstrap","f":1602466,"d_finished":1466,"c":1,"l":1603932,"d":1466},{"a":1604097,"name":"ack","f":1604097,"d_finished":0,"c":0,"l":1604844,"d":747},{"a":1604079,"name":"processing","f":1604079,"d_finished":0,"c":0,"l":1604844,"d":765},{"name":"ProduceResults","f":1603444,"d_finished":910,"c":2,"l":1604583,"d":910},{"a":1604591,"name":"Finish","f":1604591,"d_finished":0,"c":0,"l":1604844,"d":253}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.573046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:339:2351];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:16:10.573454Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:339:2351];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1602197,"name":"_full_task","f":1602197,"d_finished":0,"c":0,"l":1605394,"d":3197},"events":[{"name":"bootstrap","f":1602466,"d_finished":1466,"c":1,"l":1603932,"d":1466},{"a":1604097,"name":"ack","f":1604097,"d_finished":0,"c":0,"l":1605394,"d":1297},{"a":1604079,"name":"processing","f":1604079,"d_finished":0,"c":0,"l":1605394,"d":1315},{"name":"ProduceResults","f":1603444,"d_finished":910,"c":2,"l":1604583,"d":910},{"a":1604591,"name":"Finish","f":1604591,"d_finished":0,"c":0,"l":1605394,"d":803}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:10.573554Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:16:10.567815Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-02-19T08:16:10.573604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:16:10.573782Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:340:2352];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn >> TColumnShardTestSchema::RebootColdTiers >> CopyTable::CopyToItself_Negative >> MoveTable::WithData-Reboot >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> 
TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::WalleTasksDifferentPriorities ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2026-02-19T08:13:30.647417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:30.704578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:30.704829Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:30.712343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:30.712601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:30.712838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:30.712966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:30.713084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:30.713202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:30.713313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:30.713422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:30.713526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:30.713667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.713814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:30.713948Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:30.714094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:30.746731Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:30.746904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:30.746975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:30.747151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.747309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:30.747395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:30.747437Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:30.747522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:30.747599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:30.747651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:30.747681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:30.747881Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.747948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:30.747990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:30.748019Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:30.748102Z 
node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:30.748161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:30.748227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:30.748261Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:30.748354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:30.748403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:30.748470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:30.748520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:30.748567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:30.748606Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:30.748812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:30.748864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:30.748897Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:30.749042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:30.749087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.749117Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.749162Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:30.749211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:30.749270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:30.749320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:30.749357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:30.749388Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:30.749522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:30.749565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-02-19T08:16:06.988150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=404; 2026-02-19T08:16:06.988231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=30278; 2026-02-19T08:16:06.988290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=30455; 2026-02-19T08:16:06.988371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=21; 2026-02-19T08:16:06.988574Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=145; 2026-02-19T08:16:06.988636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=43574; 2026-02-19T08:16:06.988831Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=126; 2026-02-19T08:16:06.988991Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=103; 2026-02-19T08:16:06.989209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=161; 2026-02-19T08:16:06.989416Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=140; 2026-02-19T08:16:06.998970Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9466; 2026-02-19T08:16:07.008941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9828; 2026-02-19T08:16:07.009070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-02-19T08:16:07.009140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-02-19T08:16:07.009200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 2026-02-19T08:16:07.009298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=52; 2026-02-19T08:16:07.009358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2026-02-19T08:16:07.009474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=69; 2026-02-19T08:16:07.009531Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-02-19T08:16:07.009617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=41; 2026-02-19T08:16:07.009746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=79; 2026-02-19T08:16:07.009862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=64; 2026-02-19T08:16:07.009915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=76532; 2026-02-19T08:16:07.010134Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=4824360;raw_bytes=6980626;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=100285256;raw_bytes=169505560;count=4;records=1775000} inactive {blob_bytes=344766168;raw_bytes=550856012;count=38;records=6110000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:16:07.010280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:16:07.010362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:16:07.010453Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:16:07.010532Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=0; 2026-02-19T08:16:07.010692Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:07.010786Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:816;message=tiling compaction: actualize called; 2026-02-19T08:16:07.010856Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=17; 2026-02-19T08:16:07.010934Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488516026;tx_id=18446744073709551615;;current_snapshot_ts=1771488811724; 2026-02-19T08:16:07.011002Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=17;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:16:07.011067Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:07.011124Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:07.011253Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:16:07.011496Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.111000s; 2026-02-19T08:16:07.018850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:16:07.019346Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:16:07.019419Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:07.019510Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:816;message=tiling compaction: actualize called; 2026-02-19T08:16:07.019588Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=17; 2026-02-19T08:16:07.019674Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488516026;tx_id=18446744073709551615;;current_snapshot_ts=1771488811724; 2026-02-19T08:16:07.019738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=17;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:16:07.019794Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:07.019848Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:07.019947Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:189;event=skip_actualization;waiting=0.999000s; 2026-02-19T08:16:07.020011Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:16:07.021636Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.107000s; 2026-02-19T08:16:07.021699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5060:7027];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2026-02-19T08:13:30.616364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:30.649424Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:30.649667Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:30.656833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:30.657096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:30.657335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:30.657450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:30.657544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:30.657666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:30.657786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:30.657899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:30.658021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:30.658155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.658272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:30.658401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:30.658527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:30.688722Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:30.688880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:30.688941Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:30.689125Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.689283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:30.689361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:30.689450Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:30.689551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:30.689616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:30.689663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:30.689694Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:30.689916Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.690008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:30.690049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:30.690095Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:30.690197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:30.690250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:30.690307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:30.690343Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:30.690406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:30.690448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:30.690491Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:13:30.690543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:30.690589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:30.690625Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:30.690835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:30.690886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:30.690922Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:30.691053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:30.691097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.691137Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.691196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:30.691242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:30.691275Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:30.691320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:30.691357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:30.691389Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:30.691556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:13:30.691605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... de 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-02-19T08:16:07.229633Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=379; 2026-02-19T08:16:07.229722Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=25695; 2026-02-19T08:16:07.229774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=25866; 2026-02-19T08:16:07.229842Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-02-19T08:16:07.230025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=137; 2026-02-19T08:16:07.230077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=26651; 2026-02-19T08:16:07.230258Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=119; 2026-02-19T08:16:07.230408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=92; 2026-02-19T08:16:07.230616Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=163; 2026-02-19T08:16:07.230789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=127; 2026-02-19T08:16:07.238575Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7716; 2026-02-19T08:16:07.247600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8899; 2026-02-19T08:16:07.247709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-02-19T08:16:07.247768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-02-19T08:16:07.247812Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-02-19T08:16:07.247894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2026-02-19T08:16:07.247940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-02-19T08:16:07.248042Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=63; 2026-02-19T08:16:07.248084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-02-19T08:16:07.248145Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-02-19T08:16:07.248238Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2026-02-19T08:16:07.248335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=62; 2026-02-19T08:16:07.248377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=55419; 
2026-02-19T08:16:07.248574Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=4825976;raw_bytes=7660626;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=100319800;raw_bytes=183705560;count=4;records=1775000} inactive {blob_bytes=344884680;raw_bytes=599736012;count=38;records=6110000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:16:07.248696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:16:07.248784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:16:07.248860Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:16:07.248915Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=0; 2026-02-19T08:16:07.249035Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:07.249103Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:816;message=tiling compaction: actualize called; 2026-02-19T08:16:07.249162Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=17; 2026-02-19T08:16:07.249242Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488516017;tx_id=18446744073709551615;;current_snapshot_ts=1771488811707; 2026-02-19T08:16:07.249292Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=17;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:16:07.249339Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:07.249386Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:07.249493Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:16:07.249739Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.195000s; 2026-02-19T08:16:07.254202Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:16:07.254841Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:16:07.254917Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:07.255012Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:816;message=tiling compaction: actualize called; 2026-02-19T08:16:07.255084Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=17; 2026-02-19T08:16:07.255176Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488516017;tx_id=18446744073709551615;;current_snapshot_ts=1771488811707; 2026-02-19T08:16:07.255256Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=17;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:16:07.255323Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:07.255375Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:07.255479Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:189;event=skip_actualization;waiting=0.999000s; 2026-02-19T08:16:07.255546Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:16:07.256931Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.033000s; 2026-02-19T08:16:07.256994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5084:7052];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:16:08.032361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:16:08.032447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.033397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:16:08.033453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:16:08.033486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:16:08.033523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:16:08.033552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:16:08.033575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:16:08.033682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.035894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:16:08.036769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:16:08.037775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:16:08.174636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:16:08.174694Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:08.195461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:16:08.198269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:16:08.198462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:16:08.232064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:16:08.232946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:16:08.233833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:16:08.234819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:16:08.247811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.248036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:08.268253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:16:08.268346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.269221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:08.269300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:08.269358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:08.291138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:16:08.468292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.472461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.472630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.472710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.472774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.472837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.472933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.473005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.473110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.473198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.473273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.473335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.473390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.473460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... ts: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.603404Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186233409546][Partition][2][StateIdle] Try persist 2026-02-19T08:16:11.603450Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:11.603469Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.603491Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:11.603512Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.603530Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186233409546][Partition][0][StateIdle] Try persist 2026-02-19T08:16:11.637616Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:162: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-02-19T08:16:11.637680Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:450: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2026-02-19T08:16:11.638115Z node 1 :PERSQUEUE DEBUG: partition.cpp:1031: [72075186233409546][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-02-19T08:16:11.638261Z node 1 :PERSQUEUE DEBUG: partition.cpp:1031: [72075186233409546][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-02-19T08:16:11.638359Z node 1 :PERSQUEUE DEBUG: partition.cpp:1031: [72075186233409546][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-02-19T08:16:11.640638Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__mlp_balancing.cpp:121: Handle TEvPersQueue::TEvStatusResponse TabletId: 72075186233409546 PartResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 25166274 AvgWriteSpeedPerMin: 25166274 
AvgWriteSpeedPerHour: 25166274 AvgWriteSpeedPerDay: 25166274 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 17 PartitionSize: 16975298 StartOffset: 0 EndOffset: 1 LastWriteTimestampMs: 362 WriteLagMs: 349 AvgQuotaSpeedPerSec: 25166274 AvgQuotaSpeedPerMin: 25166274 AvgQuotaSpeedPerHour: 25166274 AvgQuotaSpeedPerDay: 25166274 SourceIdCount: 1 SourceIdRetentionPeriodSec: 2 UsedReserveSize: 16975298 AggregatedCounters { Values: 343 Values: 0 Values: 1 Values: 1 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 45532800 Values: 45532800 Values: 17 Values: 349 Values: 9223372036854775807 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 24672817647 Values: 2988 Values: 3 Values: 45532800 Values: 16975298 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 3 ScaleStatus: NORMAL } PartResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 17 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 343 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 343 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 45532800 Values: 45532800 Values: 17 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 3 Values: 45532800 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 3 ScaleStatus: NORMAL } PartResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 17 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 343 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 343 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 45532800 Values: 45532800 Values: 17 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 3 Values: 45532800 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 3 ScaleStatus: NORMAL } 2026-02-19T08:16:11.641069Z node 1 
:PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:532: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2026-02-19T08:16:11.641117Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2026-02-19T08:16:11.641394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 16975298 2026-02-19T08:16:11.658229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:16:11.680152Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:16:11.680219Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.680250Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:11.680286Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.680311Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186233409546][Partition][1][StateIdle] Try persist 2026-02-19T08:16:11.680376Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:16:11.680397Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.680418Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:11.680441Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.680460Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186233409546][Partition][2][StateIdle] Try persist 2026-02-19T08:16:11.680530Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:11.680554Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.680574Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:11.680597Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:11.680619Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186233409546][Partition][0][StateIdle] Try persist 2026-02-19T08:16:11.704243Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:16:11.704489Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/Topic1" took 275us result status StatusSuccess 2026-02-19T08:16:11.704964Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:16:08.032117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:16:08.032191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.033007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:16:08.033054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:16:08.033086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:16:08.033127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:16:08.033154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:16:08.033189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:16:08.033267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.035369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:16:08.036220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:16:08.038032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:16:08.164947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:16:08.164993Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:08.194576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:16:08.195941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:16:08.198346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:16:08.228663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:16:08.229579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:16:08.232926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:16:08.235244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:16:08.245565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.248009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:08.268204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-02-19T08:16:08.268330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.269267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:08.269349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:08.269420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:08.290784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:16:08.475046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.477121Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 817647 Values: 1987 Values: 3 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 2 ScaleStatus: NORMAL } PartResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 17 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 338 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 338 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 17 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 3 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 2 ScaleStatus: NORMAL } PartResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 17 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 338 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 338 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 17 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 3 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 2 ScaleStatus: NORMAL } 2026-02-19T08:16:11.226470Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:532: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2026-02-19T08:16:11.226533Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2026-02-19T08:16:11.226733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-02-19T08:16:11.246135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:16:11.657659Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:162: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-02-19T08:16:11.657772Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:450: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2026-02-19T08:16:11.658944Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__mlp_balancing.cpp:121: Handle TEvPersQueue::TEvStatusResponse TabletId: 72075186233409546 PartResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 25166274 AvgWriteSpeedPerMin: 25166274 AvgWriteSpeedPerHour: 25166274 AvgWriteSpeedPerDay: 25166274 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 17 PartitionSize: 16975298 StartOffset: 0 EndOffset: 1 LastWriteTimestampMs: 357 WriteLagMs: 344 AvgQuotaSpeedPerSec: 25166274 AvgQuotaSpeedPerMin: 25166274 AvgQuotaSpeedPerHour: 25166274 AvgQuotaSpeedPerDay: 25166274 SourceIdCount: 1 SourceIdRetentionPeriodSec: 2 UsedReserveSize: 0 AggregatedCounters { Values: 338 Values: 0 Values: 1 Values: 1 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 16975298 Values: 16975298 Values: 17 Values: 344 Values: 9223372036854775807 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 25166274 Values: 24672817647 Values: 2987 Values: 3 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 3 ScaleStatus: NORMAL } PartResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 17 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 338 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 338 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 17 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 3 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 3 ScaleStatus: NORMAL } PartResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 
AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 17 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 338 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 338 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 17 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 3 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 3 ScaleStatus: NORMAL } 2026-02-19T08:16:11.659406Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:532: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2026-02-19T08:16:11.659459Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2026-02-19T08:16:11.659655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-02-19T08:16:11.673540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:16:11.717420Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:16:11.717677Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 283us result status StatusSuccess 2026-02-19T08:16:11.718257Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:16:11.719180Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1008:2919] connected; active server actors: 1 2026-02-19T08:16:11.739100Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2026-02-19T08:16:11.739505Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2026-02-19T08:16:11.740675Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:399: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2026-02-19T08:16:11.770090Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1052:2953] connected; active server actors: 1 >> CopyTable::CopyToItself_Negative [GOOD] >> TColumnShardTestSchema::RebootHotTiers >> MoveTable::WithData-Reboot [GOOD] >> TestMalformedRequest::InvalidHttpStartLine3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> CopyTable::CopyToItself_Negative [GOOD] Test command err: 2026-02-19T08:16:12.199685Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:12.228778Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:12.229006Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:12.235863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:12.236118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:12.236314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:12.236402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:12.236497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:12.236594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:12.236698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:12.236783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:12.236903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:12.237012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.237140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:12.237255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:12.237367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:12.271074Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:12.271224Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:12.271273Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:12.271428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:12.271574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:12.271637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:12.271674Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:12.271738Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:12.271783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:12.271813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:12.271841Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:12.272020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:12.272098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:12.272147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:12.272173Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:12.272247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:12.272287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:12.272310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:12.272332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:12.272364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:12.272391Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:12.272427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:12.272463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:12.272518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:12.272548Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:12.272686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:12.272728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:12.272764Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:12.272871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:12.272902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.272921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.272956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:12.272980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:12.273002Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:12.273027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:12.273058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:16:12.273087Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:16:12.273232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:16:12.273306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=64; 2026-02-19T08:16:12.552745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=62; 2026-02-19T08:16:12.552888Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=50; 2026-02-19T08:16:12.553058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=122; 2026-02-19T08:16:12.553494Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=386; 2026-02-19T08:16:12.553578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=19; 2026-02-19T08:16:12.553629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-02-19T08:16:12.553672Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-02-19T08:16:12.554947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=62; 2026-02-19T08:16:12.555009Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-02-19T08:16:12.555100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=57; 2026-02-19T08:16:12.555162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-02-19T08:16:12.555228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2026-02-19T08:16:12.555268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=16; 
2026-02-19T08:16:12.555323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=24; 2026-02-19T08:16:12.555357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=4512; 2026-02-19T08:16:12.555495Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:16:12.555559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:16:12.555619Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:16:12.555851Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:12.555889Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:465;problem=Background activities cannot be started: no index at tablet; 2026-02-19T08:16:12.555957Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.129000s; 2026-02-19T08:16:12.556219Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:16:12.556276Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:16:12.556314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:12.556357Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:465;problem=Background activities cannot be started: no index at tablet; 2026-02-19T08:16:12.556473Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.088000s; 2026-02-19T08:16:12.556515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; 2026-02-19T08:16:12.864111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137196321037056;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1771488973171;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:12.864202Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137196321037056;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1771488973171;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-02-19T08:16:12.867307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1771488973171;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;this=137196321037056;op_tx=10:TX_KIND_SCHEMA;min=1771488973171;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1771488973171;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_this=137402482222976;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-02-19T08:16:12.867448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1771488973171;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;this=137196321037056;op_tx=10:TX_KIND_SCHEMA;min=1771488973171;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1771488973171;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_this=137402482222976;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:25;message=;tablet_id=9437184;tx_id=10; 2026-02-19T08:16:12.867806Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-02-19T08:16:12.867952Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1771488973171 at tablet 9437184, mediator 0 2026-02-19T08:16:12.868003Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-02-19T08:16:12.868333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:12.868430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:12.868503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:12.868609Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-02-19T08:16:12.877678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:257;event=new_schema;snapshot=plan_step=1771488973171;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-02-19T08:16:12.877845Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=0; 2026-02-19T08:16:12.877997Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:433;method=RegisterTable;path_id={internal: 1000000185, ss: 1}; 2026-02-19T08:16:12.878068Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-02-19T08:16:12.878277Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:933;message=creating tiling compaction optimizer; 2026-02-19T08:16:12.892869Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:676;event=OnTieringModified;path_id=1000000185; 2026-02-19T08:16:12.926897Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-02-19T08:16:12.928003Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137196321088576;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1771488973174;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=schema.cpp:153;propose_execute=copy_table;src=1;dst=1; 2026-02-19T08:16:12.928095Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137196321088576;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1771488973174;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=Copy to existing table; 2026-02-19T08:16:12.940490Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1771488973174;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;this=137196321088576;op_tx=11:TX_KIND_SCHEMA;min=1771488973174;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=propose_tx.cpp:16;message=Copy to existing table;tablet_id=9437184;tx_id=11; >> TestKinesisHttpProxy::DifferentContentTypes >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> TestYmqHttpProxy::TestGetQueueUrl >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData-Reboot [GOOD] Test command err: 2026-02-19T08:16:12.228482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:12.261007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:12.261291Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:12.269071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:12.269318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:12.269587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:12.269701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:12.269819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:12.269921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:12.270030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:12.270136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:12.270248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:12.270363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.270484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:12.270621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:12.270724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:12.302024Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:12.302204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:12.302262Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:12.302476Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:12.302681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:12.302765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:12.302811Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:12.302902Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:12.302962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:12.303006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:12.303036Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:12.303268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:12.303341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:12.303398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:12.303432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:12.303537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:12.303592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:12.303632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:12.303658Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:12.303706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:12.303741Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:12.303792Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:12.303849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:12.303899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:12.303936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:12.304133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:12.304200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:12.304239Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:12.304370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:12.304426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.304464Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.304556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:12.304601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:12.304638Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:12.304697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:12.304746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:16:12.304784Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:16:12.304935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:16:12.304979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... s;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.286ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:4.92ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:13.185188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:16:13.185527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:3.714ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.635ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.136ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.023ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.116ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.286ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:4.92ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:13.185843Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:3.714ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.635ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.136ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.023ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.116ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.286ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:4.92ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:13.186221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:335:2347] finished for tablet 9437184 2026-02-19T08:16:13.186868Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:329:2341];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1480431,"name":"_full_task","f":1480431,"d_finished":0,"c":0,"l":1497916,"d":17485},"events":[{"name":"bootstrap","f":1480899,"d_finished":3512,"c":1,"l":1484411,"d":3512},{"a":1496801,"name":"ack","f":1494492,"d_finished":2139,"c":1,"l":1496631,"d":3254},{"a":1496788,"name":"processing","f":1484735,"d_finished":6239,"c":3,"l":1496635,"d":7367},{"name":"ProduceResults","f":1483449,"d_finished":3946,"c":6,"l":1497515,"d":3946},{"a":1497520,"name":"Finish","f":1497520,"d_finished":0,"c":0,"l":1497916,"d":396},{"name":"task_result","f":1484760,"d_finished":4035,"c":2,"l":1494335,"d":4035}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; 
ExecutionDuration:3.714ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.635ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.136ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.023ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.116ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.286ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:4.92ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:13.186950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:329:2341];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:16:13.187536Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:329:2341];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":1480431,"name":"_full_task","f":1480431,"d_finished":0,"c":0,"l":1498632,"d":18201},"events":[{"name":"bootstrap","f":1480899,"d_finished":3512,"c":1,"l":1484411,"d":3512},{"a":1496801,"name":"ack","f":1494492,"d_finished":2139,"c":1,"l":1496631,"d":3970},{"a":1496788,"name":"processing","f":1484735,"d_finished":6239,"c":3,"l":1496635,"d":8083},{"name":"ProduceResults","f":1483449,"d_finished":3946,"c":6,"l":1497515,"d":3946},{"a":1497520,"name":"Finish","f":1497520,"d_finished":0,"c":0,"l":1498632,"d":1112},{"name":"task_result","f":1484760,"d_finished":4035,"c":2,"l":1494335,"d":4035}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:3.714ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.635ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.136ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.023ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.116ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.286ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; 
ExecutionDuration:4.92ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:13.187663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:16:13.098000Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-02-19T08:16:13.187720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:16:13.187911Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-02-19T08:16:13.188735Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-02-19T08:16:13.189113Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1771488973202:12} readable: {1771488973202:max} at tablet 9437184 2026-02-19T08:16:13.189250Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-02-19T08:16:13.189303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1771488973202:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:13.189387Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1771488973202:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; >> TestMalformedRequest::InvalidHttpStartLine5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> TestYmqHttpProxy::TestCreateQueue >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TestKinesisHttpProxy::MissingAction >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TestYmqHttpProxy::TestSendMessage >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::InvalidHttpStartLine3 [GOOD] Test command err: 2026-02-19T08:16:00.806357Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487148592070542:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:00.806450Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:01.105794Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:01.152135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:01.152236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:01.194759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:01.253117Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:01.262933Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487148592070502:2081] 1771488960800445 != 1771488960800448 2026-02-19T08:16:01.326116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:01.330238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:01.330258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:01.330268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:01.330388Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:01.511007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:01.724590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:01.729242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:16:01.732726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:01.750279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-02-19T08:16:01.811624Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:01.851389Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:01.948857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.000249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.052848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.094014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.131627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.163275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.193728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.232160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.272277Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:03.930036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487161476974056:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:03.930099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487161476974048:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:03.930296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:03.930782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487161476974063:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:03.930880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:03.934337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:03.944809Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487161476974062:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-02-19T08:16:04.031097Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487165771941411:3101] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:04.366009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:04.406598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:04.437430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:04.468257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:04.501110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 
rsion" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:12.588778Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 17ms 2026-02-19T08:16:12.589180Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:12.589213Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-02-19T08:16:12.589307Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 18ms 2026-02-19T08:16:12.589626Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } 
Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:12.589638Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 23ms 2026-02-19T08:16:12.589852Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:12.589872Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-02-19T08:16:12.589913Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 23ms 2026-02-19T08:16:12.590393Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:12.590634Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 
ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:12.724113Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7608487200682081216:2440]: Pool not found 2026-02-19T08:16:12.724337Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-02-19T08:16:12.960080Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7608487200682081231:2445]: Pool not found 2026-02-19T08:16:12.960346Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-02-19T08:16:12.962993Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487200682081332:2462], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.963046Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7608487200682081333:2463], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:16:12.963060Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.963275Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487200682081336:2464], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.963389Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:13.193148Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7608487200682081330:2461]: Pool not found 2026-02-19T08:16:13.193464Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete Http input full "POST /Root HTTP/1.1\r\nHost:example.amazonaws.com\r\nX-Amz-Target:AmazonSQS.CreateQueue\r\nX-Amz-Date:20150830T123600Z\r\nAuthorization: AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/ru-central1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b)__\r\nContent-Type:application/json\r\nContent-Length: 24\r\n\r\n{\"QueueName\": \"Example\"}" 2026-02-19T08:16:13.564045Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:58338) incoming connection opened 2026-02-19T08:16:13.564716Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:211: (#37,[::1]:58338) -! (POST ) 2026-02-19T08:16:13.564780Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:58338) <- (400 Bad Request, 20 bytes) 2026-02-19T08:16:13.564805Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:58338) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/ru-central1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b)__ Content-Type:application/json Content-Length: 24 {"QueueName": "Example"} 2026-02-19T08:16:13.564835Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:58338) Response: HTTP/1.1 400 Bad Request Connection: keep-alive Content-Type: text/plain Content-Length: 20 Invalid http version Http output full Invalid http version 2026-02-19T08:16:13.565293Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:235: (#37,[::1]:58338) connection closed |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::InvalidHttpStartLine5 [GOOD] Test command err: 2026-02-19T08:16:01.715340Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487153394873236:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:01.730511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:02.045022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:02.073739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:02.073844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:02.147682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:02.216649Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:02.282640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:02.282666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:02.282675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:02.282765Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:02.320866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:02.491088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:02.688897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:02.693851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:02.728122Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:02.767970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.909268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:02.950561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:03.012997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:03.056995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:03.092523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:03.129828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:03.168136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:03.203038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:03.232033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:04.782673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487166279776718:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:04.782675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487166279776710:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:04.782779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:04.783219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487166279776725:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:04.783290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:04.785846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:04.799185Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487166279776724:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:04.887511Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487166279776777:3097] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:05.361430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:05.389583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:05.421260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:05.448896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:05.479516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:05.508771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T0 ... 
} } } 2026-02-19T08:16:13.560174Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 23ms 2026-02-19T08:16:13.560478Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:13.560505Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 25ms 2026-02-19T08:16:13.560617Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:13.560655Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-02-19T08:16:13.560747Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 26ms 2026-02-19T08:16:13.561449Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle 
queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:13.562947Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:13.562992Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-02-19T08:16:13.563064Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 27ms 2026-02-19T08:16:13.563321Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { 
Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:13.693672Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608487184877428409:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:13.694008Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:16:13.704276Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7608487206352267290:2446]: Pool not found 2026-02-19T08:16:13.704463Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-02-19T08:16:13.884473Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7608487206352267254:2440]: Pool not found 2026-02-19T08:16:13.884801Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-02-19T08:16:13.893625Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487206352267400:2462], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:13.893773Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7608487206352267401:2463], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:16:13.893835Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:13.903794Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487206352267404:2464], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:13.903931Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:14.171698Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7608487206352267398:2461]: Pool not found 2026-02-19T08:16:14.171883Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete Http input full "POST /Root HTTP/1.1\r\nHost:example.amazonaws.com\r\nX-Amz-Target:AmazonSQS.CreateQueue\r\nX-Amz-Date:20150830T123600Z\r\nAuthorization: AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20150830/ru-central1/service/aws4_request, SignedHeaders=host;x-amz-date, Signature=5da7c1a2acd57cee7505fc6676e4e544621c30862966e37dddb68e92efbe5d6b)__\r\nContent-Type:application/json\r\nNot a header\r\nContent-Length: 24\r\n\r\n{\"QueueName\": \"Example\"}" 2026-02-19T08:16:14.526260Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:35736) incoming connection opened 2026-02-19T08:16:14.526332Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:211: (#37,[::1]:35736) -! (POST /Root) 2026-02-19T08:16:14.526386Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:35736) <- (400 Bad Request, 19 bytes) 2026-02-19T08:16:14.526446Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:35736) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Not a header Content-Length: 24 {"QueueName": "Example"} 2026-02-19T08:16:14.526469Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:35736) Response: HTTP/1.1 400 Bad Request Connection: keep-alive Content-Type: text/plain Content-Length: 19 Invalid http header Http output full Invalid http header 2026-02-19T08:16:14.527522Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:235: (#37,[::1]:35736) connection closed >> TestKinesisHttpProxy::TestPing >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] Test command err: 2026-02-19T08:15:57.602156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487135953613314:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:57.602298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:57.649380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:57.893204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:57.893321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:57.897514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:57.939320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:58.015766Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:58.016651Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487135953613282:2081] 1771488957600198 != 1771488957600201 2026-02-19T08:15:58.054548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:58.054567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:58.054584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:58.054679Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:58.214052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:58.243939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:58.249476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:15:58.434632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:15:58.442982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:15:58.530232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:58.612647Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:58.650556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:58.692486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2026-02-19T08:15:58.697009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:58.735998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:58.772785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:58.807265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:58.841405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:58.871455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:15:58.905857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:58.945105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:00.684970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487148838516825:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:00.684970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487148838516833:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:00.685062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:00.685397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487148838516840:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:00.685451Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:00.688569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:00.699391Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487148838516839:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-02-19T08:16:00.762948Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487148838516892:3098] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:01.229617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:01.259819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:01.286458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:01.318321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schem ... 
Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:15.559489Z node 3 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 10ms 2026-02-19T08:16:15.559549Z node 3 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:15.559576Z node 3 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-02-19T08:16:15.559637Z node 3 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 11ms 2026-02-19T08:16:15.559845Z node 3 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: 
Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:15.559864Z node 3 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-02-19T08:16:15.559975Z node 3 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 11ms 2026-02-19T08:16:15.560231Z node 3 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:15.560416Z node 3 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 
} } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:16:15.751250Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487214147767559:2440]: Pool not found 2026-02-19T08:16:15.751955Z node 3 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-02-19T08:16:15.853875Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608487192672928727:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:15.853953Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:16:16.011405Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487214147767564:2443]: Pool not found 2026-02-19T08:16:16.012151Z node 3 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-02-19T08:16:16.015253Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487218442734984:2462], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:16.015363Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [3:7608487218442734985:2463], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:16:16.015428Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:16.022200Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487218442734988:2464], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:16.022273Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:16.299630Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487218442734982:2461]: Pool not found 2026-02-19T08:16:16.300275Z node 3 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-02-19T08:16:16.534111Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:38986) incoming connection opened 2026-02-19T08:16:16.534193Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:38986) -> (POST /Root, 44 bytes) 2026-02-19T08:16:16.534319Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38c0:92cc:df7b:0:20c0:92cc:df7b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 3af6c7ca-f9abf522-c5d8e0d1-ebcb8bbe 2026-02-19T08:16:16.535303Z node 3 :HTTP_PROXY INFO: http_req.cpp:1671: http request [CreateQueue] requestId [3af6c7ca-f9abf522-c5d8e0d1-ebcb8bbe] reply with status: BAD_REQUEST message: Can not parse request body from JSON 2026-02-19T08:16:16.535509Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:38986) <- (400 InvalidArgumentException, 86 bytes) 2026-02-19T08:16:16.535565Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:38986) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip Content-Length: 44 nhV ,M-MKMURPrH-IU2j 2026-02-19T08:16:16.535605Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:38986) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 3af6c7ca-f9abf522-c5d8e0d1-ebcb8bbe Content-Type: application/x-amz-json-1.1 Content-Length: 86 Http output full {"__type":"InvalidArgumentException","message":"Can not parse request body from JSON"} 2026-02-19T08:16:16.539520Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:235: (#37,[::1]:38986) connection closed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2026-02-19T08:13:30.757675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:13:30.795079Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:13:30.795328Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:13:30.803299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:13:30.803551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:13:30.803803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:13:30.803915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:13:30.804009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:13:30.804138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:13:30.804228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:13:30.804339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:13:30.804447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:13:30.804583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.804702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:13:30.804805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:13:30.804926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:13:30.842886Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:13:30.843068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:13:30.843119Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:13:30.843330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.843503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:13:30.843592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:13:30.843632Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:13:30.843719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:13:30.843777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:13:30.843820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:13:30.843848Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:13:30.844059Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:13:30.844127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:13:30.844185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:13:30.844218Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:13:30.844309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:13:30.844359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:13:30.844395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:13:30.844433Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:13:30.844487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:13:30.844523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:13:30.844576Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-02-19T08:13:30.844633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:13:30.844678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:13:30.844723Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:13:30.844930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:13:30.844984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:13:30.845035Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:13:30.845171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:13:30.845218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.845248Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:13:30.845311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:13:30.845354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:13:30.845386Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:13:30.845451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:13:30.845494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:13:30.845525Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:13:30.845678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-02-19T08:13:30.845761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=15; 2026-02-19T08:16:13.619319Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=551; 2026-02-19T08:16:13.619392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=19225; 2026-02-19T08:16:13.619461Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=19459; 2026-02-19T08:16:13.619564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=21; 2026-02-19T08:16:13.619797Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=154; 2026-02-19T08:16:13.619854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=20504; 2026-02-19T08:16:13.620060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=127; 2026-02-19T08:16:13.620205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=96; 2026-02-19T08:16:13.620446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=181; 2026-02-19T08:16:13.620702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=185; 2026-02-19T08:16:13.623075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2291; 2026-02-19T08:16:13.626033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2870; 2026-02-19T08:16:13.626141Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-02-19T08:16:13.626211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=24; 2026-02-19T08:16:13.626298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=20; 2026-02-19T08:16:13.626400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=53; 2026-02-19T08:16:13.626463Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-02-19T08:16:13.626629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=93; 2026-02-19T08:16:13.626725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=20; 2026-02-19T08:16:13.626833Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=50; 2026-02-19T08:16:13.626971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=79; 2026-02-19T08:16:13.627114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=85; 2026-02-19T08:16:13.627198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=41711; 2026-02-19T08:16:13.627427Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=5508872;raw_bytes=7830634;count=1;records=85000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113448400;raw_bytes=184369450;count=4;records=1775000} inactive {blob_bytes=393458168;raw_bytes=603156748;count=38;records=6110000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:16:13.627642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:16:13.627717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:16:13.627817Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:16:13.627901Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=0; 2026-02-19T08:16:13.628076Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:13.628180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:816;message=tiling compaction: actualize called; 2026-02-19T08:16:13.628246Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=17; 2026-02-19T08:16:13.628367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488513244;tx_id=18446744073709551615;;current_snapshot_ts=1771488811768; 2026-02-19T08:16:13.628447Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=17;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:16:13.628519Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:13.628579Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:13.628751Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:16:13.629020Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.006000s; 2026-02-19T08:16:13.632114Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:16:13.634462Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:16:13.634571Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:13.634687Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:816;message=tiling compaction: actualize called; 2026-02-19T08:16:13.634776Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=17; 2026-02-19T08:16:13.634872Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488513244;tx_id=18446744073709551615;;current_snapshot_ts=1771488811768; 2026-02-19T08:16:13.634940Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=17;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:16:13.635012Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:13.635089Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:16:13.635237Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:189;event=skip_actualization;waiting=0.999000s; 2026-02-19T08:16:13.635327Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:16:13.635972Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.187000s; 2026-02-19T08:16:13.636037Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2182:4147];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> TColumnShardTestSchema::RebootOneColdTier >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |91.1%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |91.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |91.1%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut >> TColumnShardTestSchema::HotTiersAfterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:16:08.034654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:16:08.034746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.034832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:16:08.034874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:16:08.034912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:16:08.034950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:16:08.034986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:16:08.035013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:16:08.035103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.035181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:16:08.036046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:16:08.043121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:16:08.164723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:16:08.164811Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:08.195299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:16:08.196238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:16:08.198289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:16:08.228274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:16:08.229261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:16:08.232604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:16:08.235293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:16:08.247948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.248159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:08.268304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:16:08.268406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.269340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:08.269442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:08.269536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:08.290793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:16:08.472727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.473940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:16:08.474708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.474928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.475021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
T08:16:18.713361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-02-19T08:16:18.713819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.713967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.714444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.714572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.714866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.715148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.715233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.715349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.715545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.715631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.716115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.716394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.716483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.716539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.716709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.716782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.716838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:18.717292Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:16:18.723646Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:18.723857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:18.728559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435083, 
Sender [1:1415:3356], Recipient [1:1415:3356]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-02-19T08:16:18.728631Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5362: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-02-19T08:16:18.730673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:16:18.730795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:18.731758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [1:1415:3356], Recipient [1:1415:3356]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:18.731813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:18.732608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:18.732686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:18.732742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:18.732795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:18.736217Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274399233, Sender [1:1452:3356], Recipient [1:1415:3356]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:16:18.736272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5476: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:16:18.736317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1415:3356] sender: [1:1473:2058] recipient: [1:15:2062] 2026-02-19T08:16:18.779985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:1472:3401], Recipient [1:1415:3356]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:16:18.780064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:16:18.780225Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:16:18.780551Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 322us result status StatusSuccess 2026-02-19T08:16:18.781490Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { 
} ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82712 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:16:08.031749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:16:08.031838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.032570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:16:08.032621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:16:08.032658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:16:08.032694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:16:08.032727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:16:08.032753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:16:08.032830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.034188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:16:08.035057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:16:08.037796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:16:08.167951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:16:08.168009Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:08.194582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:16:08.195437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:16:08.198291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:16:08.229012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:16:08.229279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:16:08.235104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:16:08.235434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:16:08.250958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.251154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:08.270436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:16:08.270529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.270757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:08.270815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:08.270865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:08.290776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:16:08.463397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.465227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.465383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.465475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.465545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.465616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.465703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.465819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.465930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.466017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.466103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.466190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.466246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.466329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2026-02-19T08:16:19.170388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.170518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.170996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.171108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.171429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.171612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.171720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.171840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.172053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.172148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.172659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.172953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.173054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.173112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.173322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.173386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.173443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:19.173950Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:16:19.190110Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:19.190303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:19.198752Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435083, Sender [1:1541:3470], 
Recipient [1:1541:3470]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-02-19T08:16:19.198857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5362: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-02-19T08:16:19.203099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:16:19.203212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:19.203452Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [1:1541:3470], Recipient [1:1541:3470]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:19.203523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:19.203748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:19.203810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:19.203863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:19.203897Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:19.204245Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274399233, Sender [1:1578:3470], Recipient [1:1541:3470]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:16:19.204294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5476: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:16:19.204335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1541:3470] sender: [1:1597:2058] recipient: [1:15:2062] 2026-02-19T08:16:19.250572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:1596:3513], Recipient [1:1541:3470]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:16:19.250644Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:16:19.250737Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:16:19.251011Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 260us result status StatusSuccess 2026-02-19T08:16:19.251632Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 24446 Memory: 141592 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |91.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> TColumnShardTestSchema::ForgetAfterFail >> CopyTable::WithCommitInProgress+Reboot >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TestKinesisHttpProxy::GoodRequestPutRecords >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestYmqHttpProxy::TestReceiveMessage >> CopyTable::WithCommitInProgress+Reboot [GOOD] >> TestKinesisHttpProxy::TestPing [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-02-19T08:16:19.487990Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:19.522456Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:19.522736Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:19.530626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:19.530868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:19.531115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:19.531247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:19.531354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:19.531525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:19.531622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:19.531736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:19.531850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:19.531953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:19.532098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:19.532226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:19.532361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:19.561675Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:19.561937Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:19.561995Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:19.562187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:19.562348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:19.562421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:19.562462Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:19.562555Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:19.562609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:19.562668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:19.562695Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:19.562859Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:19.562923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:19.562980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:19.563014Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:19.563109Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:19.563157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:19.563190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:19.563217Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:19.563263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:19.563294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:19.563334Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:19.563380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:19.563426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:19.563458Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:19.563651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:19.563709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:19.563744Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:19.563867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:19.563920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:19.563951Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:19.564003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:19.564035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:19.564063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:19.564097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:19.564126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:16:19.564151Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:16:19.564269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:16:19.564326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... cutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.072000s; 2026-02-19T08:16:23.026989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; 2026-02-19T08:16:23.120656Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1771488980634:max} readable: {1771488980634:max} at tablet 9437184 2026-02-19T08:16:23.120859Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-02-19T08:16:23.125362Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488980634:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:16:23.125526Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488980634:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:16:23.126295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488980634:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-02-19T08:16:23.128126Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488980634:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-02-19T08:16:23.203690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771488980634:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:217;event=TTxScan started;actor_id=[1:663:2651];trace_detailed=; 2026-02-19T08:16:23.205396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-02-19T08:16:23.205727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-02-19T08:16:23.206217Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:23.206417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:23.206804Z 
node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:16:23.207061Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:23.207275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:23.207547Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:663:2651] finished for tablet 9437184 2026-02-19T08:16:23.208103Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:656:2645];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":4587647,"name":"_full_task","f":4587647,"d_finished":0,"c":0,"l":4591700,"d":4053},"events":[{"name":"bootstrap","f":4588209,"d_finished":2327,"c":1,"l":4590536,"d":2327},{"a":4590853,"name":"ack","f":4590853,"d_finished":0,"c":0,"l":4591700,"d":847},{"a":4590827,"name":"processing","f":4590827,"d_finished":0,"c":0,"l":4591700,"d":873},{"name":"ProduceResults","f":4590100,"d_finished":890,"c":2,"l":4591388,"d":890},{"a":4591398,"name":"Finish","f":4591398,"d_finished":0,"c":0,"l":4591700,"d":302}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:23.208232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:656:2645];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:16:23.208756Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:656:2645];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":4587647,"name":"_full_task","f":4587647,"d_finished":0,"c":0,"l":4592373,"d":4726},"events":[{"name":"bootstrap","f":4588209,"d_finished":2327,"c":1,"l":4590536,"d":2327},{"a":4590853,"name":"ack","f":4590853,"d_finished":0,"c":0,"l":4592373,"d":1520},{"a":4590827,"name":"processing","f":4590827,"d_finished":0,"c":0,"l":4592373,"d":1546},{"name":"ProduceResults","f":4590100,"d_finished":890,"c":2,"l":4591388,"d":890},{"a":4591398,"name":"Finish","f":4591398,"d_finished":0,"c":0,"l":4592373,"d":975}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; 
ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:16:23.208873Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:16:23.128096Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-02-19T08:16:23.209369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:16:23.209542Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2026-02-19T08:16:07.560341Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487180136788947:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:07.560457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:07.591698Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487180198718227:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:07.591481Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:16:07.592794Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:07.604777Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:16:07.763787Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:07.774793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:07.803418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:07.803556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-02-19T08:16:07.805465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:07.805563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:07.813309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:07.814277Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:16:07.816731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:07.911115Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:07.944214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:08.002105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0007ac/r3tmp/yandexCabInd.tmp 2026-02-19T08:16:08.002133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0007ac/r3tmp/yandexCabInd.tmp 2026-02-19T08:16:08.002358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0007ac/r3tmp/yandexCabInd.tmp 2026-02-19T08:16:08.002476Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:08.032700Z INFO: TTestServer started on Port 22035 GrpcPort 5726 2026-02-19T08:16:08.046945Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:5726 === TenantModeEnabled() = 1 === Init PQ - start server on port 5726 2026-02-19T08:16:08.391197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:16:08.391384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:16:08.391584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:16:08.391605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:16:08.391818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:16:08.391872Z node 
1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:08.402781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:16:08.402936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:16:08.403114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:16:08.403147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:16:08.403162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-02-19T08:16:08.403182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 CreateParts -> ConfigureParts 2026-02-19T08:16:08.405358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:16:08.405402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:16:08.405429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 ConfigureParts -> Propose 2026-02-19T08:16:08.407411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:16:08.407450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:16:08.407490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:16:08.407510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-02-19T08:16:08.412682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:16:08.413113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:16:08.413136Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-02-19T08:16:08.413152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:16:08.414926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-02-19T08:16:08.415024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:16:08.417350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771488968459, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:16:08.417516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771488968459 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:16:08.417539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:16:08.418372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 Propose -> Done 2026-02-19T08:16:08.418418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-02-19T08:16:08.418579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-02-19T08:16:08.418616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-02-19T08:16:08.420529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:16:08.420575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:16:08.420751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at sc ... 
: partition.cpp:2366: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.206440Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.206450Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.206500Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.206510Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.206519Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.206529Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.206550Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.206573Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.206582Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.206590Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.206598Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.206604Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.257843Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.257871Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.257882Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.257897Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.257907Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.311786Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.311817Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.311828Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.311843Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.311852Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.311907Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.311914Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.311923Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.311932Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.311939Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.311983Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.312006Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.312016Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.312028Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.312036Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.359735Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.359765Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.359777Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.359791Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.359801Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.415658Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.415689Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.415700Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.415716Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.415726Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.415785Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.415796Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.415803Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.415812Z node 1 
:PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.415818Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.415840Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.415848Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.415855Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.415863Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.415870Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.464062Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.464092Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.464103Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.464120Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.464130Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.517180Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.517212Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.517220Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.517224Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.517236Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.517240Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.517247Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.517252Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037891][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.517261Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.517269Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037889][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.517308Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.517319Z node 1 :PERSQUEUE 
DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.517324Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.517333Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.517339Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:16:22.565835Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:22.565869Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.565880Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:22.565898Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:22.565908Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][0][StateIdle] Try persist >> TestKinesisHttpProxy::TestRequestBadJson ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> CopyTable::WithCommitInProgress+Reboot [GOOD] Test command err: 2026-02-19T08:16:22.242293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:22.276238Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:22.276491Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:22.284275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:22.284528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:22.284750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:22.284889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:22.284994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:22.285091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:22.285223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:22.285342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:22.285453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:22.285584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:22.285788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:22.285900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:22.286024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:22.316894Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:22.317053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:22.317108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:22.317320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:22.317493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:22.317566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:22.317612Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:22.317700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:22.317776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:22.317829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:22.317858Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:22.318065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:22.318153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:22.318213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:22.318245Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:22.318337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:22.318387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:22.318425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:22.318457Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:22.318505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:22.318557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:22.318607Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:22.318660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:22.318713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:22.318750Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:22.318961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:22.319016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:22.319055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:22.319215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:22.319269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:22.319298Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:22.319367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:22.319413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:22.319445Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:22.319485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:22.319520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:16:22.319549Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:16:22.319664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:16:22.319760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:3.01ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.609ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.122ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.161ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.412ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:4.342ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:24.313831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:16:24.314121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:3.01ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.609ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.122ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.161ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.412ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:4.342ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:24.314428Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan 
iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:3.01ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.609ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.122ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.161ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.412ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:4.342ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:24.314828Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:547:2511] finished for tablet 9437184 2026-02-19T08:16:24.315466Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:546:2510];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":2608505,"name":"_full_task","f":2608505,"d_finished":0,"c":0,"l":2620276,"d":11771},"events":[{"name":"bootstrap","f":2608767,"d_finished":1589,"c":1,"l":2610356,"d":1589},{"a":2619192,"name":"ack","f":2616847,"d_finished":2147,"c":1,"l":2618994,"d":3231},{"a":2619161,"name":"processing","f":2610513,"d_finished":4384,"c":3,"l":2618997,"d":5499},{"name":"ProduceResults","f":2609778,"d_finished":3749,"c":6,"l":2619853,"d":3749},{"a":2619859,"name":"Finish","f":2619859,"d_finished":0,"c":0,"l":2620276,"d":417},{"name":"task_result","f":2610528,"d_finished":2181,"c":2,"l":2616700,"d":2181}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; 
ExecutionDuration:3.01ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.609ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.122ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.161ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.412ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:4.342ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:24.315600Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:546:2510];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:16:24.316189Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:546:2510];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":2608505,"name":"_full_task","f":2608505,"d_finished":0,"c":0,"l":2621030,"d":12525},"events":[{"name":"bootstrap","f":2608767,"d_finished":1589,"c":1,"l":2610356,"d":1589},{"a":2619192,"name":"ack","f":2616847,"d_finished":2147,"c":1,"l":2618994,"d":3985},{"a":2619161,"name":"processing","f":2610513,"d_finished":4384,"c":3,"l":2618997,"d":6253},{"name":"ProduceResults","f":2609778,"d_finished":3749,"c":6,"l":2619853,"d":3749},{"a":2619859,"name":"Finish","f":2619859,"d_finished":0,"c":0,"l":2621030,"d":1171},{"name":"task_result","f":2610528,"d_finished":2181,"c":2,"l":2616700,"d":2181}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.008ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:3.01ms;WaitDuration:0ms;RawBytesRead:8392], [StepName: PREPARE_RESULT; ExecutionDuration:0.609ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.122ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.161ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.412ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; 
ExecutionDuration:4.342ms;WaitDuration:0ms;RawBytesRead:8392]) ; 2026-02-19T08:16:24.316315Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:16:24.301038Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-02-19T08:16:24.316358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:16:24.316532Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:547:2511];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn >> MoveTable::RenameToItself_Negative >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> MoveTable::RenameToItself_Negative [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 |91.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithEmptyName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameToItself_Negative [GOOD] Test command err: 2026-02-19T08:16:28.908645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:28.938708Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:28.938967Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:28.946870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:28.947105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:28.947319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:28.947444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:28.947553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:28.947644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:28.947758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:28.947873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:28.947988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:28.948110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:28.948221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:28.948332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:28.948435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:28.979277Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:28.979453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:28.979504Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:28.979704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:28.979867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:28.979949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:28.979992Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:28.980089Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:28.980151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:28.980198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:28.980241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:28.980412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:28.980479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:28.980527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-02-19T08:16:28.980562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:28.980654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:28.980707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:28.980742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:28.980767Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:28.980817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:28.980851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:28.980882Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:28.980912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:28.980950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:28.980973Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:28.981107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:28.981168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:28.981197Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:28.981293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:28.981320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:28.981336Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:28.981363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:28.981390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:28.981411Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:28.981435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:28.981458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:16:28.981483Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-02-19T08:16:28.981651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:16:28.981744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
LUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=101; 2026-02-19T08:16:29.287787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=61; 2026-02-19T08:16:29.287914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=46; 2026-02-19T08:16:29.288074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=103; 2026-02-19T08:16:29.288467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=351; 2026-02-19T08:16:29.288533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2026-02-19T08:16:29.288580Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-02-19T08:16:29.288620Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-02-19T08:16:29.288704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2026-02-19T08:16:29.288754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-02-19T08:16:29.288839Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2026-02-19T08:16:29.288895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-02-19T08:16:29.288961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-02-19T08:16:29.289030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=32; 2026-02-19T08:16:29.289107Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=27; 2026-02-19T08:16:29.289151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3612; 2026-02-19T08:16:29.289293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 
2026-02-19T08:16:29.289368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:16:29.289442Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:16:29.296074Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:29.296155Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:465;problem=Background activities cannot be started: no index at tablet; 2026-02-19T08:16:29.296243Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.052000s; 2026-02-19T08:16:29.296574Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:16:29.296650Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:16:29.296697Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:16:29.296751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:465;problem=Background activities cannot be started: no index at tablet; 2026-02-19T08:16:29.296875Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.155000s; 2026-02-19T08:16:29.296914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; 2026-02-19T08:16:29.630632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137185782848256;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1771488989878;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:29.630695Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137185782848256;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1771488989878;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-02-19T08:16:29.645672Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1771488989878;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;this=137185782848256;op_tx=10:TX_KIND_SCHEMA;min=1771488989878;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1771488989878;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_this=137391944034176;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-02-19T08:16:29.645825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1771488989878;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;this=137185782848256;op_tx=10:TX_KIND_SCHEMA;min=1771488989878;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1771488989878;max=18446744073709551615;plan=0;src=[1:157:2179];cookie=00:0;;int_this=137391944034176;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:25;message=;tablet_id=9437184;tx_id=10; 2026-02-19T08:16:29.646191Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-02-19T08:16:29.646330Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1771488989878 at tablet 9437184, mediator 0 2026-02-19T08:16:29.646382Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-02-19T08:16:29.646720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:29.646816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:29.646877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-02-19T08:16:29.646967Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-02-19T08:16:29.661999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:257;event=new_schema;snapshot=plan_step=1771488989878;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-02-19T08:16:29.662119Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=0; 2026-02-19T08:16:29.662287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:433;method=RegisterTable;path_id={internal: 1000000185, ss: 1}; 2026-02-19T08:16:29.662363Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-02-19T08:16:29.663109Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:933;message=creating tiling compaction optimizer; 2026-02-19T08:16:29.683326Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:676;event=OnTieringModified;path_id=1000000185; 2026-02-19T08:16:29.713624Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-02-19T08:16:29.714518Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137185782899776;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1771488989882;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=1; 2026-02-19T08:16:29.714610Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137185782899776;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1771488989882;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=Rename to existing table; 2026-02-19T08:16:29.727224Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1771488989882;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;this=137185782899776;op_tx=11:TX_KIND_SCHEMA;min=1771488989882;max=18446744073709551615;plan=0;src=[1:104:2137];cookie=00:1;;fline=propose_tx.cpp:16;message=Rename to existing table;tablet_id=9437184;tx_id=11; >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TestKinesisHttpProxy::DoubleCreateStream >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TTxDataShardMiniKQL::WriteEraseRead >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> TestKinesisHttpProxy::TestRequestWithIAM >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> TestYmqHttpProxy::TestGetQueueAttributes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> TestKinesisHttpProxy::ListShards >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> 
TestYmqHttpProxy::TestSetQueueAttributes >> TestKinesisHttpProxy::GoodRequestGetRecords >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:16:08.032171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:16:08.032259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.032573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:16:08.032623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:16:08.032658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:16:08.032704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:16:08.032738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:16:08.032767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:16:08.032840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:16:08.036528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:16:08.037361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:16:08.038224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:16:08.164721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:16:08.164788Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:08.194798Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:16:08.195406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:16:08.198643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:16:08.230251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:16:08.231046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:16:08.231770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:16:08.234819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:16:08.248110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.248268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:08.268558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:16:08.268625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:08.270225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:08.270318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:08.270389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:08.291398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:16:08.475017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.475835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.475932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:16:08.476489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:16:08.476714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
6:38.149484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-02-19T08:16:38.149864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.149985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.150337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.150409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.150728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.150884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.150942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.151067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.151245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.151323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.151852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.152120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.152199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.152268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.152435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.152497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.152571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:16:38.152949Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:16:38.179629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:38.179832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:16:38.185160Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435083, 
Sender [1:2151:4072], Recipient [1:2151:4072]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-02-19T08:16:38.185226Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5362: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-02-19T08:16:38.189611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:16:38.189728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:16:38.190534Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [1:2151:4072], Recipient [1:2151:4072]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:38.190610Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:16:38.191831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:16:38.191899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:16:38.191963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:16:38.192013Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:16:38.192816Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274399233, Sender [1:2190:4072], Recipient [1:2151:4072]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:16:38.192862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5476: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:16:38.192918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:2151:4072] sender: [1:2211:2058] recipient: [1:15:2062] 2026-02-19T08:16:38.293628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:2210:4119], Recipient [1:2151:4072]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:16:38.293736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:16:38.293864Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:16:38.294194Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 302us result status StatusSuccess 2026-02-19T08:16:38.295021Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { 
} ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 26850 Memory: 156952 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |91.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |91.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore >> TestKinesisHttpProxy::TestListStreamConsumers >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> TTxAllocatorClientTest::ZeroRange [GOOD] >> TestKinesisHttpProxy::TestRequestNoAuthorization >> KqpPg::TypeCoercionBulkUpsert >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> TestKinesisHttpProxy::ListShards [GOOD] 
>> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD]
>> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD]
>> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD]
>> VDiskBalancing::TestRandom_Mirror3dc [GOOD]
>> TestKinesisHttpProxy::TestListStreamConsumers [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60
>> KqpPrefixedVectorIndexes::EmptyPrefixedVectorIndexInsertWithOverlap+Covered [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD]
>> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD]
>> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD]
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD]
>> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42
>> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9
>> TestYmqHttpProxy::TestTagQueue
>> TestYmqHttpProxy::TestDeleteQueue
>> TestYmqHttpProxy::BillingRecordsForJsonApi
>> TestYmqHttpProxy::TestCreateQueueWithTags
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17
>> TestYmqHttpProxy::TestListQueues
>> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD]
>> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD]
>> KqpPrefixedVectorIndexes::EmptyPrefixedVectorIndexInsertWithOverlap-Covered
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47
>> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn
>> KqpPg::TypeCoercionBulkUpsert [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD]
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD]
>> TestYmqHttpProxy::TestTagQueue [GOOD]
>> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD]
>> TestKinesisHttpProxy::ListShardsEmptyFields
>> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD]
>> TestKinesisHttpProxy::GoodRequestGetRecordsCbor
>> TestYmqHttpProxy::TestListQueues [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD]
>> TestKinesisHttpProxy::TestUnauthorizedPutRecords
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2
>> KqpPg::TypeCoercionInsert+useSink
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD]
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10
>> TestYmqHttpProxy::TestDeleteQueue [GOOD]
>> TestYmqHttpProxy::TestUntagQueue
>> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD]
>> TestYmqHttpProxy::TestDeleteMessage
>> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18
>> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23
>> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD]
>> TestYmqHttpProxy::TestUntagQueue [GOOD]
>> TestYmqHttpProxy::TestChangeMessageVisibility
>> TestKinesisHttpProxy::ListShardsExclusiveStartShardId
>> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName
>> TestYmqHttpProxy::TestPurgeQueue
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19
>> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62
>> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD]
>> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD]
>> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD]
>> TestKinesisHttpProxy::TestListStreamConsumersWithToken
>> TestYmqHttpProxy::TestDeleteMessage [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD]
>> TestKinesisHttpProxy::BadRequestUnknownMethod
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63
>> TestKinesisHttpProxy::ListShardsTimestamp
>> TestKinesisHttpProxy::TestWrongStream
>> TestYmqHttpProxy::TestDeleteMessageBatch
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63
>> TestKinesisHttpProxy::TestWrongStream [GOOD]
>> TestKinesisHttpProxy::ErroneousRequestGetRecords
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD]
Test command err: Running TestTtl ttlColumnType=Timestamp 2026-02-19T08:16:28.380939Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:16:28.387332Z node
1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:16:28.387936Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:28.423258Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:28.423560Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:28.434631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:28.434881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:28.435128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:28.435259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:28.435365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:28.435489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:28.435624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:28.435730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:28.435828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:28.435937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:28.436075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:28.436196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:28.436335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:28.440025Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828684, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:16:28.472039Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:28.472256Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:28.472311Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:28.472551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:28.472735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:28.472808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:28.472855Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:28.472964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:28.473031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:28.473075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:28.473106Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:28.473291Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:28.473366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:28.473406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:28.473449Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:28.473555Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:28.473616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:28.473680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:28.473757Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:28.473831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:28.473904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:28.473947Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:28.473998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:28.474041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:28.474077Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:28.474300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:28.474361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:28.474391Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:28.474534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:28.474598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:28.474657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:28.474741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:28.474795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:28.474831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:28.474887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:28.474939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... _FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.436ms;WaitDuration:0ms;RawBytesRead:232], [StepName: PREPARE_RESULT; ExecutionDuration:0.478ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.206ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.019ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.108ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.203ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.461ms;WaitDuration:0ms;RawBytesRead:232]) ; 2026-02-19T08:17:01.314536Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.436ms;WaitDuration:0ms;RawBytesRead:232], [StepName: PREPARE_RESULT; ExecutionDuration:0.478ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.206ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.019ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.108ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.203ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.461ms;WaitDuration:0ms;RawBytesRead:232]) ; 2026-02-19T08:17:01.314884Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:17:01.315292Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.436ms;WaitDuration:0ms;RawBytesRead:232], [StepName: PREPARE_RESULT; ExecutionDuration:0.478ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.206ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.019ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.108ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.203ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.461ms;WaitDuration:0ms;RawBytesRead:232]) ; 2026-02-19T08:17:01.315691Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.436ms;WaitDuration:0ms;RawBytesRead:232], [StepName: PREPARE_RESULT; ExecutionDuration:0.478ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.206ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.019ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.108ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.203ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.461ms;WaitDuration:0ms;RawBytesRead:232]) ; 2026-02-19T08:17:01.316273Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [5:589:2569] finished for tablet 9437184 2026-02-19T08:17:01.317085Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[5:588:2568];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":33700075,"name":"_full_task","f":33700075,"d_finished":0,"c":0,"l":33713930,"d":13855},"events":[{"name":"bootstrap","f":33700511,"d_finished":1821,"c":1,"l":33702332,"d":1821},{"a":33712410,"name":"ack","f":33708860,"d_finished":3317,"c":1,"l":33712177,"d":4837},{"a":33712381,"name":"processing","f":33702548,"d_finished":6298,"c":3,"l":33712184,"d":7847},{"name":"ProduceResults","f":33701621,"d_finished":5343,"c":6,"l":33713315,"d":5343},{"a":33713325,"name":"Finish","f":33713325,"d_finished":0,"c":0,"l":33713930,"d":605},{"name":"task_result","f":33702574,"d_finished":2890,"c":2,"l":33708589,"d":2890}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.436ms;WaitDuration:0ms;RawBytesRead:232], [StepName: PREPARE_RESULT; ExecutionDuration:0.478ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.206ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.019ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.108ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.203ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.461ms;WaitDuration:0ms;RawBytesRead:232]) ; 2026-02-19T08:17:01.317227Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[5:588:2568];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:17:01.318190Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[5:588:2568];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":33700075,"name":"_full_task","f":33700075,"d_finished":0,"c":0,"l":33714857,"d":14782},"events":[{"name":"bootstrap","f":33700511,"d_finished":1821,"c":1,"l":33702332,"d":1821},{"a":33712410,"name":"ack","f":33708860,"d_finished":3317,"c":1,"l":33712177,"d":5764},{"a":33712381,"name":"processing","f":33702548,"d_finished":6298,"c":3,"l":33712184,"d":8774},{"name":"ProduceResults","f":33701621,"d_finished":5343,"c":6,"l":33713315,"d":5343},{"a":33713325,"name":"Finish","f":33713325,"d_finished":0,"c":0,"l":33714857,"d":1532},{"name":"task_result","f":33702574,"d_finished":2890,"c":2,"l":33708589,"d":2890}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.011ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.436ms;WaitDuration:0ms;RawBytesRead:232], [StepName: PREPARE_RESULT; ExecutionDuration:0.478ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.206ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.019ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.108ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.203ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.461ms;WaitDuration:0ms;RawBytesRead:232]) ; 2026-02-19T08:17:01.318366Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:17:01.300909Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-02-19T08:17:01.318431Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:17:01.318651Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-02-19T08:16:23.722828Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:16:23.729440Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:16:23.730067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:23.766349Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:23.766618Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:23.774779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:23.775058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:23.775319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:23.775449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:23.775556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:23.775703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:23.775812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:23.775938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:23.776043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:23.776156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:23.776285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:23.776421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:23.776570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:23.779759Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828684, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:16:23.808029Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:23.808187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:23.808248Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:23.808474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:23.808626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:23.808702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:23.808747Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:23.808832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:23.808894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:23.808937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:23.808969Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:23.809185Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:23.809264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:23.809303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:23.809332Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:23.809413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:23.809464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:23.809516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:23.809573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:23.809646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:23.809690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:23.809738Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:23.809778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:23.809814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:23.809845Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:23.810076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:23.810217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:23.810254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:23.810415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:23.810467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:23.810500Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:23.810583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:23.810633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:23.810669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:23.810719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:23.810779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
ecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.059ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.562ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.217ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.028ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.151ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.197ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.224ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:17:02.894743Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.059ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.562ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.217ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.028ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.151ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.197ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.224ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:17:02.894968Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:17:02.895175Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.059ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.562ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.217ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; 
ExecutionDuration:0.028ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.151ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.197ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.224ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:17:02.895374Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.059ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.562ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.217ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.028ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.151ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.197ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.224ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:17:02.895815Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [5:720:2701] finished for tablet 9437184 2026-02-19T08:17:02.896572Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[5:719:2700];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":39796293,"name":"_full_task","f":39796293,"d_finished":0,"c":0,"l":39812430,"d":16137},"events":[{"name":"bootstrap","f":39796601,"d_finished":1608,"c":1,"l":39798209,"d":1608},{"a":39811482,"name":"ack","f":39806892,"d_finished":4237,"c":2,"l":39811311,"d":5185},{"a":39811468,"name":"processing","f":39798437,"d_finished":8221,"c":5,"l":39811316,"d":9183},{"name":"ProduceResults","f":39797579,"d_finished":5919,"c":9,"l":39811932,"d":5919},{"a":39811940,"name":"Finish","f":39811940,"d_finished":0,"c":0,"l":39812430,"d":490},{"name":"task_result","f":39798466,"d_finished":3838,"c":3,"l":39806638,"d":3838}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.059ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.562ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.217ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.028ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.151ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.197ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.224ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:17:02.896676Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[5:719:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:17:02.897398Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[5:719:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":39796293,"name":"_full_task","f":39796293,"d_finished":0,"c":0,"l":39813266,"d":16973},"events":[{"name":"bootstrap","f":39796601,"d_finished":1608,"c":1,"l":39798209,"d":1608},{"a":39811482,"name":"ack","f":39806892,"d_finished":4237,"c":2,"l":39811311,"d":6021},{"a":39811468,"name":"processing","f":39798437,"d_finished":8221,"c":5,"l":39811316,"d":10019},{"name":"ProduceResults","f":39797579,"d_finished":5919,"c":9,"l":39811932,"d":5919},{"a":39811940,"name":"Finish","f":39811940,"d_finished":0,"c":0,"l":39813266,"d":1326},{"name":"task_result","f":39798466,"d_finished":3838,"c":3,"l":39806638,"d":3838}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:2.059ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.562ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.217ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.028ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.151ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.197ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:3.224ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:17:02.897553Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:17:02.878150Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-02-19T08:17:02.897622Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:17:02.897853Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2026-02-19T08:15:11.246531Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-02-19T08:15:11.254176Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-02-19T08:15:11.262644Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-02-19T08:15:11.298073Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.321132Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-02-19T08:15:11.351046Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.351283Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.351343Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.351478Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-02-19T08:15:11.351603Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.351726Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-02-19T08:15:11.351889Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-02-19T08:15:11.358765Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-02-19T08:15:11.360409Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.360484Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:15:11.360614Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-02-19T08:15:11.360671Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2026-02-19T08:16:07.979937Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487180151087880:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:07.979996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:08.042141Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:16:08.054455Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487185108495236:2202];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:08.072652Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:16:08.073187Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:08.290643Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:08.302712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:08.352771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:08.352916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:08.354118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:08.354218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:08.408110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-02-19T08:16:08.410510Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:16:08.416082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:08.474570Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:08.538333Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:08.580651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:08.711840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0007ab/r3tmp/yandexJxTgoO.tmp 2026-02-19T08:16:08.711872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0007ab/r3tmp/yandexJxTgoO.tmp 2026-02-19T08:16:08.712035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0007ab/r3tmp/yandexJxTgoO.tmp 2026-02-19T08:16:08.712132Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:08.743234Z INFO: TTestServer started on Port 9534 GrpcPort 15830 PQClient connected to localhost:15830 2026-02-19T08:16:09.014551Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:09.062977Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:09.085747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:09.192744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:12.090499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487201625925671:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.090733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.091254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487201625925698:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.091260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487201625925699:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.091320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.091641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487201625925703:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.091713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.095458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:12.099441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487201625925736:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.099522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.100249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487201625925738:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.100328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:12.126554Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487201625925702:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:16:12.383023Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487201625925780:3027] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:12.411648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:12.471981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:12.529689Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608487201625925799:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:16:12.532670Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MTU0MjkyMWUtODQ1ODE0OGMtOTA1MzU3NDctZGE2OWIzMDE=, ActorId: [1:7608487201625925659:2335], ActorState: ExecuteState, LegacyTraceId: 01khtfh38q1scsvk6kwd8f20xk, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:16:12.539015Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position ... 2358: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:17:03.614289Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.614305Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:03.614322Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.614335Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][3][StateIdle] Try persist 2026-02-19T08:17:03.614779Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:17:03.614806Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.614818Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:03.614833Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.614844Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][4][StateIdle] Try persist 2026-02-19T08:17:03.624561Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:03.624592Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.624603Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:03.624617Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.624629Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 
2026-02-19T08:17:03.713756Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7608487371756787024:2146], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:17:03.713909Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7608487371756787024:2146], cacheItem# { Subscriber: { Subscriber: [7:7608487371756787064:2164] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 27 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1771489012111 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:17:03.714152Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7608487423296397617:4374], recipient# [7:7608487423296397616:2546], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:17:03.714629Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:17:03.714659Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.714684Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:03.714703Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.714716Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][3][StateIdle] Try persist 2026-02-19T08:17:03.715025Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [7:7608487371756787024:2146], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:17:03.715097Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: 
[72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:17:03.715118Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.715128Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:03.715137Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [7:7608487371756787024:2146], cacheItem# { Subscriber: { Subscriber: [7:7608487388936657617:3168] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1771489015926 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:17:03.715140Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.715150Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][4][StateIdle] Try persist 2026-02-19T08:17:03.715189Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [7:7608487371756787024:2146], cacheItem# { Subscriber: { Subscriber: [7:7608487388936657430:3041] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1771489015653 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:17:03.715685Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7608487423296397622:4377], recipient# [7:7608487423296397620:2537], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:17:03.717701Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle 
TEvTxProxySchemeCache::TEvResolveKeySet: self# [7:7608487371756787024:2146], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2026-02-19T08:17:03.717817Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [7:7608487371756787024:2146], cacheItem# { Subscriber: { Subscriber: [7:7608487388936657617:3168] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1771489015926 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:17:03.718001Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7608487423296397629:4379], recipient# [7:7608487423296397628:2547], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2026-02-19T08:17:03.724858Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:03.724894Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.724910Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:03.724927Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:03.724939Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist |91.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-02-19T08:16:12.059525Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:16:12.064326Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:16:12.064847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:12.098953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:12.099254Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:12.107498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:12.107758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:12.108015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:12.108131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:12.108249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:12.108386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:12.108518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:12.108633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:12.108758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:12.108868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.109005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:12.109134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:12.109322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:12.112434Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828684, Sender [1:107:2139], Recipient [1:129:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:16:12.141259Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:12.141661Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:12.141746Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:12.141944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:12.142147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:12.142235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:12.142291Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:12.142411Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:12.142485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:12.142550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:12.142583Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:12.142790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks 
found; 2026-02-19T08:16:12.142861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:12.142905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:12.142936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:12.143038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:12.143103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:12.143159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:12.143217Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:12.143294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:12.143334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:12.143365Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:12.143408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:12.143467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:12.143522Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:12.143718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:12.143770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:12.143799Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:12.143950Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:12.143999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.144036Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.144096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:12.144147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:12.144186Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:12.144240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:16:12.144306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... Name: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.773ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.585ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.179ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.16ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.934ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:16:51.658431Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.773ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.585ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; 
ExecutionDuration:0.179ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.16ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.934ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:16:51.658709Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:16:51.658920Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.773ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.585ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.179ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.16ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.934ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:16:51.659127Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.773ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.585ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.179ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.16ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: 
AllSteps; ExecutionDuration:2.934ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:16:51.659587Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [5:618:2623] finished for tablet 9437184 2026-02-19T08:16:51.660388Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults"],"t":0.001},{"events":["f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":40160776,"name":"_full_task","f":40160776,"d_finished":0,"c":0,"l":40176335,"d":15559},"events":[{"name":"bootstrap","f":40161045,"d_finished":1495,"c":1,"l":40162540,"d":1495},{"a":40175334,"name":"ack","f":40170445,"d_finished":4473,"c":2,"l":40175142,"d":5474},{"a":40175316,"name":"processing","f":40162764,"d_finished":7849,"c":5,"l":40175150,"d":8868},{"name":"ProduceResults","f":40161978,"d_finished":5976,"c":9,"l":40175827,"d":5976},{"a":40175834,"name":"Finish","f":40175834,"d_finished":0,"c":0,"l":40176335,"d":501},{"name":"task_result","f":40162788,"d_finished":3233,"c":3,"l":40170169,"d":3233}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.773ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.585ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.179ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.16ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.934ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:16:51.660508Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:16:51.661265Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults"],"t":0.001},{"events":["f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":40160776,"name":"_full_task","f":40160776,"d_finished":0,"c":0,"l":40177246,"d":16470},"events":[{"name":"bootstrap","f":40161045,"d_finished":1495,"c":1,"l":40162540,"d":1495},{"a":40175334,"name":"ack","f":40170445,"d_finished":4473,"c":2,"l":40175142,"d":6385},{"a":40175316,"name":"processing","f":40162764,"d_finished":7849,"c":5,"l":40175150,"d":9779},{"name":"ProduceResults","f":40161978,"d_finished":5976,"c":9,"l":40175827,"d":5976},{"a":40175834,"name":"Finish","f":40175834,"d_finished":0,"c":0,"l":40177246,"d":1412},{"name":"task_result","f":40162788,"d_finished":3233,"c":3,"l":40170169,"d":3233}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.009ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:1.773ms;WaitDuration:0ms;RawBytesRead:464], [StepName: PREPARE_RESULT; ExecutionDuration:0.585ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.179ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.02ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.16ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:0.208ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:2.934ms;WaitDuration:0ms;RawBytesRead:464]) ; 2026-02-19T08:16:51.661417Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:16:51.642750Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-02-19T08:16:51.661476Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:16:51.661674Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TestKinesisHttpProxy::TestWrongStream2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 2758199537202038208 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2026-02-19T08:14:56.279661Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2026-02-19T08:14:56.503142Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key [1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2026-02-19T08:14:57.478796Z 1 00h01m30.100512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 32 SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# 
[1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key [1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 48 SEND TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 56 SEND 
TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Stop node 3 2026-02-19T08:14:59.159201Z 1 00h02m00.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2026-02-19T08:14:59.302368Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:14:59.304075Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12238861084127024519] 2026-02-19T08:14:59.414210Z 1 00h02m15.161024s :BS_PROXY ERROR: StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3 Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4). GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3" ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult: ... 
1:1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999768} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 3 2026-02-19T08:16:22.996093Z 1 00h25m01.113584s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 1 2026-02-19T08:16:23.337910Z 1 00h25m11.114096s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 964 SEND TEvPut with 
key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 0 2026-02-19T08:16:24.882960Z 9 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:116364:350] ServerId# [1:117411:178] TabletId# 72057594037932033 PipeClientId# [9:116364:350] 2026-02-19T08:16:24.883311Z 8 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:147022:18] ServerId# [1:147031:4107] TabletId# 72057594037932033 PipeClientId# [8:147022:18] 2026-02-19T08:16:24.883461Z 7 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:145966:18] ServerId# [1:145972:3979] TabletId# 72057594037932033 PipeClientId# [7:145966:18] 2026-02-19T08:16:24.883622Z 6 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:123016:18] ServerId# [1:123022:1015] TabletId# 72057594037932033 PipeClientId# [6:123016:18] 2026-02-19T08:16:24.883806Z 5 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:143048:18] ServerId# [1:143055:3597] TabletId# 72057594037932033 PipeClientId# [5:143048:18] 2026-02-19T08:16:24.883941Z 4 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:151998:18] ServerId# [1:152007:4708] TabletId# 72057594037932033 PipeClientId# [4:151998:18] 2026-02-19T08:16:24.884198Z 3 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:141956:18] ServerId# [1:141965:3474] TabletId# 72057594037932033 PipeClientId# [3:141956:18] 2026-02-19T08:16:24.884381Z 2 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:151030:18] ServerId# [1:151036:4599] TabletId# 72057594037932033 PipeClientId# [2:151030:18] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] 
TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999768} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Stop node 7 2026-02-19T08:16:27.753178Z 1 00h26m21.153072s :PIPE_SERVER ERROR: 
[72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999829} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Starting nodes Start compaction 1 Start checking ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2026-02-19T08:16:08.095043Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487184218781699:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:08.097225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:08.162270Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487185649432133:2100];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:08.163522Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:16:08.192880Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:16:08.195130Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:08.412147Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:08.415301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:08.467805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:08.467912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:08.471988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:08.472055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:08.502114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:08.537590Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:16:08.539013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:08.601531Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:08.615243Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:08.767962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:08.870255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/00060e/r3tmp/yandex88HJsa.tmp 2026-02-19T08:16:08.870311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/00060e/r3tmp/yandex88HJsa.tmp 2026-02-19T08:16:08.870508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/00060e/r3tmp/yandex88HJsa.tmp 2026-02-19T08:16:08.870598Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:08.912078Z INFO: TTestServer started on Port 9546 GrpcPort 10300 2026-02-19T08:16:09.101569Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:10300 2026-02-19T08:16:09.159101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:09.189778Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:09.278689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:11.729132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487197103684908:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:11.729260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:11.729638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487197103684938:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:11.729676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:11.729748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487197103684922:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:11.734380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:11.780455Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487197103684940:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:16:11.969137Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487197103685028:3040] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:11.999243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:12.104443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:12.137367Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608487197103685048:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:16:12.137918Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NjBhNjliZjgtZWY3NjA3ZWUtYjUwOWZjYjMtNjFkODJiNQ==, ActorId: [1:7608487197103684905:2334], ActorState: ExecuteState, LegacyTraceId: 01khtfh2y2d15bk3x93w5rg18s, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:16:12.140272Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:16:12.238443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608487201398652784:3373] 2026-02-19T08:16:13.094264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487184218781699:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:13.094376Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:16:13.149211Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608487185649432133:2100];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:13.149300Z node 2 :METADATA_PROVIDER ERROR: log.c ... 
: partition.cpp:2366: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.004265Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004273Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][4][StateIdle] Try persist 2026-02-19T08:16:49.004294Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:16:49.004303Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004310Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.004325Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004333Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037903][Partition][4][StateIdle] Try persist 2026-02-19T08:16:49.004355Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:16:49.004365Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004374Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.004384Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004391Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037903][Partition][3][StateIdle] Try persist 2026-02-19T08:16:49.004416Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:16:49.004424Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004432Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.004447Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004454Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][4][StateIdle] Try persist 2026-02-19T08:16:49.004477Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:49.004486Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004494Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.004506Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004512Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037900][Partition][0][StateIdle] Try persist 2026-02-19T08:16:49.004532Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2358: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:49.004541Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004548Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.004558Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.004567Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037904][Partition][0][StateIdle] Try persist 2026-02-19T08:16:49.033851Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:49.033892Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.033907Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.033928Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.033941Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:16:49.105734Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:16:49.105773Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.105788Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.105805Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.105817Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][3][StateIdle] Try persist 2026-02-19T08:16:49.105869Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:16:49.105880Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.105888Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.105897Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.105917Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037899][Partition][4][StateIdle] Try persist 2026-02-19T08:16:49.105971Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:16:49.105982Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.105991Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.106002Z node 3 
:PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106009Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][3][StateIdle] Try persist 2026-02-19T08:16:49.106033Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:16:49.106043Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106050Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.106061Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106070Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][4][StateIdle] Try persist 2026-02-19T08:16:49.106094Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:16:49.106104Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106113Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.106122Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106130Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037903][Partition][4][StateIdle] Try persist 2026-02-19T08:16:49.106151Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:16:49.106161Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106168Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.106180Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106188Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037903][Partition][3][StateIdle] Try persist 2026-02-19T08:16:49.106210Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:49.106220Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106229Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.106239Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106248Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037900][Partition][0][StateIdle] Try persist 2026-02-19T08:16:49.106270Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:49.106280Z node 3 :PERSQUEUE 
DEBUG: partition.cpp:2417: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106288Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.106300Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.106307Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037904][Partition][0][StateIdle] Try persist 2026-02-19T08:16:49.134446Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:16:49.134486Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.134499Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:16:49.134516Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:16:49.134529Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] |91.2%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestKinesisHttpProxy::TestCounters |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/storage/ydb-core-kqp-ut-olap-storage |91.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/storage/ydb-core-kqp-ut-olap-storage |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/storage/ydb-core-kqp-ut-olap-storage |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/operations/ydb-core-kqp-ut-olap-operations |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/operations/ydb-core-kqp-ut-olap-operations |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |91.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/operations/ydb-core-kqp-ut-olap-operations |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |91.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |91.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |91.2%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> TestYmqHttpProxy::TestSendMessageBatch >> KqpPg::EmptyQuery+useSink >> KqpPg::NoTableQuery+useSink |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> IcebergClusterProcessor::ValidateDdlCreationForHadoopWithS3 [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> ExternalBlobsMultipleChannels::Simple >> PQCountersLabeled::Partition >> TPartitionTests::OldPlanStep >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: 2026-02-19T08:15:19.016695Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486974537936652:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:19.016782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:19.534373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:19.534467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-02-19T08:15:19.613526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:19.680926Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:19.690484Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486970242969158:2081] 1771488918936376 != 1771488918936379 2026-02-19T08:15:19.698779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:19.789808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:19.990323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:19.990359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:19.990365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:19.990443Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:20.025843Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:20.744296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:20.850748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:21.145443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:21.365921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:21.477353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:15:23.697505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486991717807514:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.697842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.698407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486991717807524:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.698450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.013207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486974537936652:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:24.013306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:15:24.109613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.165409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.211839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.253923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.291514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.332153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.369749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.435236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.537091Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486996012775690:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.537174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.537387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486996012775695:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.537427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486996012775696:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.537524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.541244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:24.553826Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486996012775699:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:15:24.650968Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486996012775750:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:26.406540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:34.308036Z no ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:06.401699Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:06.485449Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:06.486737Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608487174456687334:2081] 1771488966365946 != 1771488966365949 2026-02-19T08:16:06.504528Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:06.504607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:06.540170Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:06.608934Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:06.608954Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:06.608959Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:06.609024Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:06.645572Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:07.140592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:07.160602Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:07.239159Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:07.392872Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:07.429649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:07.493456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.374088Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487191636558392:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.374206Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.374540Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487191636558401:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.374627Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.462854Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.501495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.538657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.573646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.604721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.650068Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.688433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.738580Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:10.837333Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487191636559279:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.837412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.837494Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487191636559284:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.837639Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487191636559286:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.837700Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:10.841589Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:10.859401Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487191636559287:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:16:10.964759Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487191636559339:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:11.374657Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608487174456687408:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:11.374732Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:16:12.996069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:21.467708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:16:21.467729Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> TPartitionTests::OldPlanStep [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> TPartitionTests::IncorrectRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2026-02-19T08:16:14.192822Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487209904523929:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:14.199157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:14.456973Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:14.483965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:14.484104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:14.534689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-02-19T08:16:14.592604Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487209904523892:2081] 1771488974186214 != 1771488974186217 2026-02-19T08:16:14.609160Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:14.678809Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:14.714749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:14.714785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:14.714791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:14.714901Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:14.949083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:14.962803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:16:15.161886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:15.170997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:16:15.179367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:15.200396Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:15.205804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:16:15.275937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.460705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.573081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-02-19T08:16:15.578116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.636068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-02-19T08:16:15.641348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.681905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.754800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.821016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.877696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.953812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.043417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:16:18.249915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487227084394742:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.250169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.254176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487227084394754:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.254294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487227084394755:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.254480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.259581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:18.290779Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487227084394758:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:18.389152Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487227084394809:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:18.885355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:18.929040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:18.976192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_tabl ... ata Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:06.970782Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-02-19T08:17:06.970854Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 21ms 2026-02-19T08:17:06.971165Z node 7 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:06.972777Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: 
"Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:06.972817Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 19ms 2026-02-19T08:17:06.973241Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 
2026-02-19T08:17:06.973278Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-02-19T08:17:06.973400Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 23ms 2026-02-19T08:17:06.973902Z node 7 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:07.126012Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7608487434321628282:2440]: Pool not found 2026-02-19T08:17:07.126204Z node 7 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-02-19T08:17:07.410602Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7608487434321628284:2441]: Pool not found 2026-02-19T08:17:07.410816Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-02-19T08:17:07.414865Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608487438616595706:2462], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:07.414953Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7608487438616595707:2463], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:07.415045Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:07.415452Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608487438616595710:2464], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:07.415500Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:07.764718Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7608487438616595704:2461]: Pool not found 2026-02-19T08:17:07.764958Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-02-19T08:17:07.942401Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:45710) incoming connection opened 2026-02-19T08:17:07.942484Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:45710) -> (POST /Root, 3 bytes) 2026-02-19T08:17:07.942684Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d809:56e6:7f7b:0:c009:56e6:7f7b:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: a49c3f7c-cde617c9-af304ea4-75b6a58 2026-02-19T08:17:07.942978Z node 7 :HTTP_PROXY INFO: http_req.cpp:1671: http request [UnknownMethodName] requestId [a49c3f7c-cde617c9-af304ea4-75b6a58] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2026-02-19T08:17:07.943173Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:45710) <- (400 InvalidAction, 76 bytes) 2026-02-19T08:17:07.943222Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:45710) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { } 2026-02-19T08:17:07.943255Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:45710) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: a49c3f7c-cde617c9-af304ea4-75b6a58 Content-Type: application/x-amz-json-1.1 Content-Length: 76 2026-02-19T08:17:07.943339Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:45710) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TPartitionTests::IncorrectRange [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> Cache::Test4 [GOOD] >> Cache::Test5 >> IcebergClusterProcessor::ValidateDdlCreationForHiveWithS3 [GOOD] >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] >> TColumnShardTestSchema::RebootOneColdTier [GOOD] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2026-02-19T08:16:15.062892Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487216242295289:2264];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:15.063135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:15.388198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:15.388301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:15.441621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:15.450121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:15.653912Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:15.683622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:15.683667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:15.683674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:15.683681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:15.683755Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:15.970312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:16.061850Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:16.306980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:16.342548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:16:16.346758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:16.380391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:16:16.658287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.810910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.878239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-02-19T08:16:16.884205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.975951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.017991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.063874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.110893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.147859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.197851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.244846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.322057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487233422165884:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.322217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.322574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487233422165896:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.322656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487233422165897:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.322766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.326979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:19.354116Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487233422165900:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:19.449056Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487233422165951:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:19.964857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.016884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.059935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487216242295289:2264];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:20.060008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:16:20.061846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.135299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is u ... 7: Request [5a1cb15c-83a15f1a-1c6026d7-69dcec26] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "5a1cb15c-83a15f1a-1c6026d7-69dcec26" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "5a1cb15c-83a15f1a-1c6026d7-69dcec26" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-02-19T08:17:09.503962Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:431: http request [DeleteMessageBatch] requestId [5a1cb15c-83a15f1a-1c6026d7-69dcec26] Got succesfult GRPC response. Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2026-02-19T08:17:09.504143Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [DeleteMessageBatch] requestId [5a1cb15c-83a15f1a-1c6026d7-69dcec26] reply ok 2026-02-19T08:17:09.504268Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1727: http request [DeleteMessageBatch] requestId [5a1cb15c-83a15f1a-1c6026d7-69dcec26] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 197 SourceAddress: 9892:e270:c67b:0:8092:e270:c67b:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2026-02-19T08:17:09.504372Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:47548) <- (200 , 44 bytes) 2026-02-19T08:17:09.504461Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:47548) connection closed 2026-02-19T08:17:09.505353Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:47556) incoming connection opened 2026-02-19T08:17:09.505423Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:47556) -> (POST /Root, 106 bytes) 2026-02-19T08:17:09.505534Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f87f:c771:c67b:0:e07f:c771:c67b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: ab044b4f-3efe779e-de6b545d-d6adbe3d 2026-02-19T08:17:09.505965Z node 7 :HTTP_PROXY INFO: http_req.cpp:573: http request [ReceiveMessage] requestId [ab044b4f-3efe779e-de6b545d-d6adbe3d] got new request from [f87f:c771:c67b:0:e07f:c771:c67b:0] 2026-02-19T08:17:09.506372Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:507: http request [ReceiveMessage] requestId [ab044b4f-3efe779e-de6b545d-d6adbe3d] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2026-02-19T08:17:09.506395Z node 7 :HTTP_PROXY INFO: http_req.cpp:331: http request [ReceiveMessage] requestId [ab044b4f-3efe779e-de6b545d-d6adbe3d] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:17:09.506494Z node 7 :SQS DEBUG: ymq_proxy.cpp:148: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: ab044b4f-3efe779e-de6b545d-d6adbe3d 2026-02-19T08:17:09.506585Z node 7 :SQS DEBUG: proxy_actor.cpp:263: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2026-02-19T08:17:09.506593Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Request proxy started 2026-02-19T08:17:09.506687Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2026-02-19T08:17:09.506726Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 3ms 2026-02-19T08:17:09.506864Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { 
Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2026-02-19T08:17:09.506864Z node 7 :SQS DEBUG: service.cpp:761: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2026-02-19T08:17:09.506894Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2026-02-19T08:17:09.506951Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 3ms 2026-02-19T08:17:09.506957Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Get configuration duration: 0ms 2026-02-19T08:17:09.506964Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [] Sending executed reply 2026-02-19T08:17:09.507053Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Send get leader node request to sqs service for cloud4/000000000000000101v0 2026-02-19T08:17:09.507066Z node 7 :SQS DEBUG: queue_leader.cpp:1915: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/0] 2026-02-19T08:17:09.507073Z node 7 :SQS DEBUG: service.cpp:581: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Leader node for queue [cloud4/000000000000000101v0] is 7 2026-02-19T08:17:09.507091Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Got leader node for queue response. Node id: 7. Status: 0 2026-02-19T08:17:09.507154Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" 2026-02-19T08:17:09.507214Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" 2026-02-19T08:17:09.507258Z node 7 :SQS DEBUG: action.h:131: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Request started. Actor: [7:7608487446874178778:3933] 2026-02-19T08:17:09.507296Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7608487446874178778:3933] 2026-02-19T08:17:09.507308Z node 7 :SQS DEBUG: service.cpp:754: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2026-02-19T08:17:09.507335Z node 7 :SQS DEBUG: action.h:625: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Get configuration duration: 0ms 2026-02-19T08:17:09.507348Z node 7 :SQS TRACE: action.h:645: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Got configuration. Root url: http://ghrun-fkrokuvqke.auto.internal:8771, Shards: 4, Fail: 0 2026-02-19T08:17:09.507368Z node 7 :SQS TRACE: action.h:660: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Got configuration. 
Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2026-02-19T08:17:09.507378Z node 7 :SQS TRACE: action.h:425: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] DoRoutine 2026-02-19T08:17:09.507430Z node 7 :SQS TRACE: queue_leader.cpp:2426: Increment active message requests for [cloud4/000000000000000101v0/3]. ActiveMessageRequests: 1 2026-02-19T08:17:09.507444Z node 7 :SQS DEBUG: queue_leader.cpp:938: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Received empty result from shard 3 infly. Infly capacity: 0. Messages count: 0 2026-02-19T08:17:09.507453Z node 7 :SQS DEBUG: queue_leader.cpp:1164: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] No known messages in this shard. Skip attempt to add messages to infly 2026-02-19T08:17:09.507461Z node 7 :SQS DEBUG: queue_leader.cpp:1170: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Already tried to add messages to infly 2026-02-19T08:17:09.507499Z node 7 :SQS TRACE: queue_leader.cpp:2436: Decrement active message requests for [[cloud4/000000000000000101v0/3]. ActiveMessageRequests: 0 2026-02-19T08:17:09.507547Z node 7 :SQS TRACE: action.h:262: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" } } 2026-02-19T08:17:09.507619Z node 7 :SQS TRACE: proxy_service.h:35: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Sending sqs response: { ReceiveMessage { RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" } RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-02-19T08:17:09.507685Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request ReceiveMessage working duration: 0ms 2026-02-19T08:17:09.507700Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ReceiveMessage { RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" } RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-02-19T08:17:09.507720Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7608487446874178778:3933]. Found: 1 2026-02-19T08:17:09.507736Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7608487446874178777:2548]: ReceiveMessage { RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" } RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-02-19T08:17:09.507836Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] HandleResponse: { ReceiveMessage { RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" } RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2026-02-19T08:17:09.507904Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [ab044b4f-3efe779e-de6b545d-d6adbe3d] Sending reply from proxy actor: { ReceiveMessage { RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" } RequestId: "ab044b4f-3efe779e-de6b545d-d6adbe3d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-02-19T08:17:09.508019Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:431: http request [ReceiveMessage] requestId [ab044b4f-3efe779e-de6b545d-d6adbe3d] Got succesfult GRPC response. 
2026-02-19T08:17:09.508063Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [ReceiveMessage] requestId [ab044b4f-3efe779e-de6b545d-d6adbe3d] reply ok 2026-02-19T08:17:09.508128Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1727: http request [ReceiveMessage] requestId [ab044b4f-3efe779e-de6b545d-d6adbe3d] Send metering event. HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 154 SourceAddress: f87f:c771:c67b:0:e07f:c771:c67b:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2026-02-19T08:17:09.508197Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:47556) <- (200 , 2 bytes) 2026-02-19T08:17:09.508276Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:47556) connection closed Http output full {} >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> Cache::Test5 [GOOD] >> Cache::Test6 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] Test command err: test case: 1 test case: 2 test case: 3 test case: 4 test case: 5 test case: 6 test case: 7 test case: 8 test case: 9 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489579.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=171489579.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489579.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=151489579.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488379.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=151488379.000000s;Name=;Codec=}; 2026-02-19T08:16:21.720244Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:21.752817Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:21.753125Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:21.759653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:21.759835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:21.760043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:21.760168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:21.760282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:21.760409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:21.760522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:21.760654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:21.760813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:21.760935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:21.761044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:21.761179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:21.761287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:21.794994Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:21.795167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:21.795236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:21.795444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:21.795608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:21.795685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:21.795734Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:21.795832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:21.795896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:21.795942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:21.795975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:21.796158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:21.796237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:21.796281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:21.796322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:21.796429Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:21.796491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:21.796545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:21.796575Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:21.796629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:21.796669Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:21.796699Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:21.796762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:21.796806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:21.796838Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:21.797064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:21.797143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:21.797180Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:21.797374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:21.797419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:21.797448Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:21.797505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:21.797585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:21.797621Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:21.797666Z node 1 :TX_COLUM ... 
ard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2026-02-19T08:17:10.800486Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=64; 2026-02-19T08:17:10.800526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2616; 2026-02-19T08:17:10.800559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2719; 2026-02-19T08:17:10.800614Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-02-19T08:17:10.800686Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=28; 2026-02-19T08:17:10.800720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3228; 2026-02-19T08:17:10.800821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=54; 2026-02-19T08:17:10.800920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2026-02-19T08:17:10.801014Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=60; 2026-02-19T08:17:10.801112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=59; 2026-02-19T08:17:10.802853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1699; 2026-02-19T08:17:10.804524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1610; 2026-02-19T08:17:10.804591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-02-19T08:17:10.804625Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-02-19T08:17:10.804650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-02-19T08:17:10.804700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=29; 2026-02-19T08:17:10.804726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-02-19T08:17:10.804780Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=34; 2026-02-19T08:17:10.804821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-02-19T08:17:10.804868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=21; 2026-02-19T08:17:10.804924Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=34; 2026-02-19T08:17:10.805102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=157; 2026-02-19T08:17:10.805130Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=27705; 2026-02-19T08:17:10.805221Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:17:10.805298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:17:10.805335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:17:10.805402Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:17:10.813199Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=1; 2026-02-19T08:17:10.813329Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:17:10.813401Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=2; 2026-02-19T08:17:10.813465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488719247;tx_id=18446744073709551615;;current_snapshot_ts=1771489007134; 2026-02-19T08:17:10.813506Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:17:10.813553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:10.813584Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:10.813661Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:17:10.813877Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.133000s; 2026-02-19T08:17:10.821163Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:17:10.821293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:17:10.821342Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:17:10.821445Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=2; 2026-02-19T08:17:10.821511Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488719247;tx_id=18446744073709551615;;current_snapshot_ts=1771489007134; 2026-02-19T08:17:10.821564Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:17:10.821610Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:10.821651Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:10.821759Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:17:10.822378Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.106000s; 2026-02-19T08:17:10.822425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 >> TPartitionTests::GetPartitionWriteInfoError >> TSentinelTests::PDiskErrorState >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink >> TestKinesisHttpProxy::ListShardsToken >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TPartitionTests::FailedTxsDontBlock >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> Cache::Test6 [GOOD] >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> TestKinesisHttpProxy::GoodRequestCreateStream >> TestKinesisHttpProxy::TestWrongRequest >> TSentinelTests::PDiskRackGuardHalfRack >> TSentinelTests::Smoke >> TSentinelTests::PDiskPileGuardHalfPile >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test6 [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> TestYmqHttpProxy::TestListQueueTags >> TestKinesisHttpProxy::TestCounters [GOOD] |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_ddisk/ydb-core-blobstorage-ut_blobstorage-ut_ddisk |91.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_ddisk/ydb-core-blobstorage-ut_blobstorage-ut_ddisk |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_ddisk/ydb-core-blobstorage-ut_blobstorage-ut_ddisk >> TColumnShardTestSchema::RebootColdTiers [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> 
TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianFaultyPDisks >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489573.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489573.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=171489573.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489573.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489573.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=171489573.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488373.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151489573.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=151489573.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488373.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151488373.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=151488373.000000s;Name=;Codec=}; 2026-02-19T08:16:13.829543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:13.864266Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:13.864654Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:13.890858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:13.891219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:13.891529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:13.891702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:13.891827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:13.891965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:13.892126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:13.892313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:13.892481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:13.892637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:13.892760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:13.892880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:13.893016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:13.958214Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:13.958420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:13.958500Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:13.958705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:13.958878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:13.958957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:13.959006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:13.959101Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:13.959164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:13.959208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:13.959236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:13.959401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:13.959463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:13.959510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:13.959548Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:13.959666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:13.959736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:13.959785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:13.959835Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:13.959895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:13.959937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:13.959974Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:13.960023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:13.960065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:13.960098Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:13.960305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:13.960387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:13.960427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:13.960554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:13.960608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:13.960640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:13.960694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:13.960739Z node 1 :TX_COLUMNSHARD WARN: l ... 
=12; 2026-02-19T08:17:15.668191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=88; 2026-02-19T08:17:15.668230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3231; 2026-02-19T08:17:15.668289Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3350; 2026-02-19T08:17:15.668364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=21; 2026-02-19T08:17:15.668437Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=29; 2026-02-19T08:17:15.668483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4051; 2026-02-19T08:17:15.668627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=90; 2026-02-19T08:17:15.668763Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=84; 2026-02-19T08:17:15.668893Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=81; 2026-02-19T08:17:15.669024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=86; 2026-02-19T08:17:15.671709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2623; 2026-02-19T08:17:15.674185Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2388; 2026-02-19T08:17:15.674272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-02-19T08:17:15.674332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-02-19T08:17:15.674377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-02-19T08:17:15.674445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2026-02-19T08:17:15.674502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-02-19T08:17:15.674595Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2026-02-19T08:17:15.674634Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-02-19T08:17:15.674781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2026-02-19T08:17:15.674882Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2026-02-19T08:17:15.675208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=277; 2026-02-19T08:17:15.675248Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21386; 2026-02-19T08:17:15.675388Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:17:15.675505Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:17:15.675560Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:17:15.675628Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:17:15.695782Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=1; 2026-02-19T08:17:15.695940Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:17:15.696028Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=2; 
2026-02-19T08:17:15.696098Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488723480;tx_id=18446744073709551615;;current_snapshot_ts=1771489011367; 2026-02-19T08:17:15.696177Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:17:15.696239Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:15.696287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:15.696379Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:17:15.696607Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.189000s; 2026-02-19T08:17:15.698685Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:17:15.698795Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:17:15.698833Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:17:15.698944Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=2; 2026-02-19T08:17:15.699033Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488723480;tx_id=18446744073709551615;;current_snapshot_ts=1771489011367; 2026-02-19T08:17:15.699089Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:17:15.699137Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:15.699198Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:15.699307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:17:15.699862Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.196000s; 2026-02-19T08:17:15.699892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0 >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2026-02-19T08:16:15.662354Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487214957245776:2137];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:15.662425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:15.988728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:16.026343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:16.026423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:16.116615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:16.190366Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:16.193948Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487214957245677:2081] 1771488975656451 != 1771488975656454 2026-02-19T08:16:16.215577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:16.379437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:16.379471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:16.379481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:16.379586Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:16.672247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:16.845336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:17.171441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:17.186556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:16:17.195326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:17.281011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.482035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.551192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-02-19T08:16:17.564359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.623844Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-02-19T08:16:17.631197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.721250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.783085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.858085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.946467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:18.024017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:18.093469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.263652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487236432083837:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:20.263666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487236432083829:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:20.263797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:20.266053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487236432083844:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:20.266176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:20.267994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:20.287765Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487236432083843:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:20.388749Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487236432083896:3109] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:20.662568Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487214957245776:2137];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:20.662647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:16:20.809594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.840395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.881123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx ... base: '/Root' iam token size: 0 2026-02-19T08:17:14.503942Z node 7 :SQS DEBUG: ymq_proxy.cpp:148: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: f84ad64b-bad586de-21e7cdda-32067114 2026-02-19T08:17:14.504047Z node 7 :SQS DEBUG: proxy_actor.cpp:263: Request [f84ad64b-bad586de-21e7cdda-32067114] Proxy actor: used user_name='cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2026-02-19T08:17:14.504065Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [f84ad64b-bad586de-21e7cdda-32067114] Request proxy started 2026-02-19T08:17:14.504147Z node 7 :SQS DEBUG: service.cpp:761: Request [f84ad64b-bad586de-21e7cdda-32067114] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2026-02-19T08:17:14.504258Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [f84ad64b-bad586de-21e7cdda-32067114] Get configuration duration: 0ms 2026-02-19T08:17:14.504336Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [f84ad64b-bad586de-21e7cdda-32067114] Send get leader node request to sqs service for cloud4/000000000000000301v0 2026-02-19T08:17:14.504346Z node 7 :SQS DEBUG: service.cpp:581: Request [f84ad64b-bad586de-21e7cdda-32067114] Leader node for queue [cloud4/000000000000000301v0] is 7 2026-02-19T08:17:14.504369Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [f84ad64b-bad586de-21e7cdda-32067114] Got leader node for queue response. Node id: 7. 
Status: 0 2026-02-19T08:17:14.504470Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [f84ad64b-bad586de-21e7cdda-32067114] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "f84ad64b-bad586de-21e7cdda-32067114" 2026-02-19T08:17:14.504562Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [f84ad64b-bad586de-21e7cdda-32067114] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "f84ad64b-bad586de-21e7cdda-32067114" 2026-02-19T08:17:14.504617Z node 7 :SQS DEBUG: action.h:131: Request [f84ad64b-bad586de-21e7cdda-32067114] Request started. Actor: [7:7608487467919280914:5606] 2026-02-19T08:17:14.504650Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7608487467919280914:5606] 2026-02-19T08:17:14.504673Z node 7 :SQS DEBUG: service.cpp:754: Request [f84ad64b-bad586de-21e7cdda-32067114] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2026-02-19T08:17:14.507862Z node 7 :SQS TRACE: executor.cpp:286: Request [588075c-2b2e4ac7-1bb817e8-3ace140d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976710924 Step: 1771489034553 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { 
Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2026-02-19T08:17:14.507895Z node 7 :SQS DEBUG: executor.cpp:287: Request [588075c-2b2e4ac7-1bb817e8-3ace140d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 11ms 2026-02-19T08:17:14.508443Z node 7 :SQS TRACE: executor.cpp:325: Request [588075c-2b2e4ac7-1bb817e8-3ace140d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976710924 Step: 1771489034553 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2026-02-19T08:17:14.508539Z node 7 :SQS TRACE: executor.cpp:327: Request [588075c-2b2e4ac7-1bb817e8-3ace140d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{}"} 2026-02-19T08:17:14.508711Z node 7 :SQS DEBUG: executor.cpp:401: Request [588075c-2b2e4ac7-1bb817e8-3ace140d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 12ms 
2026-02-19T08:17:14.508849Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [588075c-2b2e4ac7-1bb817e8-3ace140d] Sending executed reply 2026-02-19T08:17:14.509162Z node 7 :SQS DEBUG: action.h:625: Request [f84ad64b-bad586de-21e7cdda-32067114] Get configuration duration: 3ms 2026-02-19T08:17:14.509178Z node 7 :SQS TRACE: action.h:645: Request [f84ad64b-bad586de-21e7cdda-32067114] Got configuration. Root url: http://ghrun-fkrokuvqke.auto.internal:8771, Shards: 1, Fail: 0 2026-02-19T08:17:14.509199Z node 7 :SQS TRACE: action.h:425: Request [f84ad64b-bad586de-21e7cdda-32067114] DoRoutine 2026-02-19T08:17:14.509260Z node 7 :SQS TRACE: action.h:262: Request [f84ad64b-bad586de-21e7cdda-32067114] SendReplyAndDie from action actor { ListQueueTags { RequestId: "f84ad64b-bad586de-21e7cdda-32067114" } } 2026-02-19T08:17:14.509398Z node 7 :SQS TRACE: proxy_service.h:35: Request [f84ad64b-bad586de-21e7cdda-32067114] Sending sqs response: { ListQueueTags { RequestId: "f84ad64b-bad586de-21e7cdda-32067114" } RequestId: "f84ad64b-bad586de-21e7cdda-32067114" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2026-02-19T08:17:14.509502Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ListQueueTags { RequestId: "f84ad64b-bad586de-21e7cdda-32067114" } RequestId: "f84ad64b-bad586de-21e7cdda-32067114" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2026-02-19T08:17:14.509562Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7608487467919280912:2778]: ListQueueTags { RequestId: "f84ad64b-bad586de-21e7cdda-32067114" } RequestId: "f84ad64b-bad586de-21e7cdda-32067114" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2026-02-19T08:17:14.509619Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7608487467919280914:5606]. Found: 1 2026-02-19T08:17:14.510384Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [f84ad64b-bad586de-21e7cdda-32067114] HandleResponse: { ListQueueTags { RequestId: "f84ad64b-bad586de-21e7cdda-32067114" } RequestId: "f84ad64b-bad586de-21e7cdda-32067114" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2026-02-19T08:17:14.510459Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [f84ad64b-bad586de-21e7cdda-32067114] Sending reply from proxy actor: { ListQueueTags { RequestId: "f84ad64b-bad586de-21e7cdda-32067114" } RequestId: "f84ad64b-bad586de-21e7cdda-32067114" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2026-02-19T08:17:14.510644Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:431: http request [ListQueueTags] requestId [f84ad64b-bad586de-21e7cdda-32067114] Got succesfult GRPC response. 2026-02-19T08:17:14.510693Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [ListQueueTags] requestId [f84ad64b-bad586de-21e7cdda-32067114] reply ok Http output full {} 2026-02-19T08:17:14.510790Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1727: http request [ListQueueTags] requestId [f84ad64b-bad586de-21e7cdda-32067114] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 154 SourceAddress: 38f2:8423:687b:0:20f2:8423:687b:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2026-02-19T08:17:14.510936Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:44770) <- (200 , 2 bytes) 2026-02-19T08:17:14.511023Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:44770) connection closed >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> TSentinelTests::PDiskPileGuardHalfPile [GOOD] >> TSentinelTests::PDiskPileGuardWithoutBridgeMode >> TSentinelBaseTests::GuardianFaultyPDisks [GOOD] >> TSentinelBaseTests::GuardianRackRatio >> TSentinelBaseTests::GuardianRackRatio [GOOD] >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=171489572.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489572.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489572.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=171489572.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489572.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489572.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=171489572.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488372.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151489572.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=151489572.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488372.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151488372.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=151488372.000000s;Name=;Codec=}; 2026-02-19T08:16:12.711419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:12.744368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:12.744668Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:12.755494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:12.755790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:12.756065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:12.756216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:12.756346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:12.756478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:12.756629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:12.756798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:12.756959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:12.757111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.757232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:12.757361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:12.757478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:12.793463Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:12.793660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:12.793736Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:12.793928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:12.794109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:12.794194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:12.794245Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:12.794343Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:12.794402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:12.794442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:12.794476Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:12.794672Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:12.794735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:12.794781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:12.794841Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:12.794949Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:12.795025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:12.795068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:12.795097Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:12.795148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:12.795185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:12.795213Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:12.795256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:12.795300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:12.795344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:12.795550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:12.795599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:12.795631Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:12.795748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:12.795792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:12.795821Z nod ... 
; 2026-02-19T08:17:16.869659Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=104; 2026-02-19T08:17:16.869702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8526; 2026-02-19T08:17:16.869783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8677; 2026-02-19T08:17:16.869853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-02-19T08:17:16.869938Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=37; 2026-02-19T08:17:16.869976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9288; 2026-02-19T08:17:16.870120Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=90; 2026-02-19T08:17:16.870240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2026-02-19T08:17:16.870363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=80; 2026-02-19T08:17:16.870473Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=67; 2026-02-19T08:17:16.875146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4606; 2026-02-19T08:17:16.930894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=55612; 2026-02-19T08:17:16.931026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-02-19T08:17:16.931088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=15; 2026-02-19T08:17:16.931129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-02-19T08:17:16.931202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2026-02-19T08:17:16.931241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-02-19T08:17:16.931322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2026-02-19T08:17:16.931360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-02-19T08:17:16.931417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-02-19T08:17:16.931498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=48; 2026-02-19T08:17:16.931828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=296; 2026-02-19T08:17:16.931874Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=80243; 2026-02-19T08:17:16.932007Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:17:16.932134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:17:16.932189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:17:16.932258Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:17:16.952097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=1; 2026-02-19T08:17:16.952265Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:17:16.952358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=3; 
2026-02-19T08:17:16.952429Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488722541;tx_id=18446744073709551615;;current_snapshot_ts=1771488974020; 2026-02-19T08:17:16.952473Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:17:16.952519Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:16.952561Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:16.952653Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:17:16.952882Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.088000s; 2026-02-19T08:17:16.954001Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:17:16.954448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:17:16.954504Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:17:16.954584Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=3; 2026-02-19T08:17:16.954645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488722541;tx_id=18446744073709551615;;current_snapshot_ts=1771488974020; 2026-02-19T08:17:16.954690Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:17:16.954750Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:16.954785Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:16.954881Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:17:16.958911Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.002000s; 2026-02-19T08:17:16.959006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2012:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::GuardianRackRatio [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2026-02-19T08:16:15.691728Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487212985575047:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:15.691806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:16.297865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:16.390771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:16.390852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:16.488280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:16.573780Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:16.578052Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7608487212985575006:2081] 1771488975672005 != 1771488975672008 2026-02-19T08:16:16.593342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:16.689246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:16.689270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:16.689400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:16.689468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:16.790148Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:16.947441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:16.963373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:16:17.278502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:17.287004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:16:17.291421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:17.451110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.582756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.639194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-02-19T08:16:17.645655Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.695300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.771780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.825238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.888569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.941890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.991757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:18.068175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.988001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487230165445857:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.988070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487230165445867:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.988107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.992309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487230165445872:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.992431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.992533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:20.006811Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487230165445871:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:20.078143Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487234460413220:3106] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:20.521199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.559753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.600051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.647456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... s)) )) ) 2026-02-19T08:17:16.099309Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request SendMessageBatch working duration: 142ms 2026-02-19T08:17:16.099444Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse SendMessageBatch { RequestId: "7fa088ec-eda4963a-691166c2-d79268a8" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "5239d680-197e2d70-66028de9-2f15b676" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f7d968c6-4683519d-ed9adafa-78732a4d" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "7fa088ec-eda4963a-691166c2-d79268a8" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2026-02-19T08:17:16.099577Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7608487471423715825:2504]: SendMessageBatch { RequestId: "7fa088ec-eda4963a-691166c2-d79268a8" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "5239d680-197e2d70-66028de9-2f15b676" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f7d968c6-4683519d-ed9adafa-78732a4d" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "7fa088ec-eda4963a-691166c2-d79268a8" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2026-02-19T08:17:16.099645Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7608487471423715832:3746]. Found: 1 2026-02-19T08:17:16.100387Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [7fa088ec-eda4963a-691166c2-d79268a8] HandleResponse: { SendMessageBatch { RequestId: "7fa088ec-eda4963a-691166c2-d79268a8" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "5239d680-197e2d70-66028de9-2f15b676" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f7d968c6-4683519d-ed9adafa-78732a4d" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "7fa088ec-eda4963a-691166c2-d79268a8" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2026-02-19T08:17:16.100538Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [7fa088ec-eda4963a-691166c2-d79268a8] Sending reply from proxy actor: { SendMessageBatch { RequestId: "7fa088ec-eda4963a-691166c2-d79268a8" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "5239d680-197e2d70-66028de9-2f15b676" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f7d968c6-4683519d-ed9adafa-78732a4d" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "7fa088ec-eda4963a-691166c2-d79268a8" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"5239d680-197e2d70-66028de9-2f15b676"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"f7d968c6-4683519d-ed9adafa-78732a4d"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} 2026-02-19T08:17:16.104342Z node 7 :SQS DEBUG: executor.cpp:401: Request [7aa853aa-7af5883b-7030241a-3e58b7d0] Query(idx=WRITE_MESSAGE_ID) Queue [cloud4/000000000000000101v0] execution duration: 70ms 2026-02-19T08:17:16.104649Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:431: http request [SendMessageBatch] requestId [7fa088ec-eda4963a-691166c2-d79268a8] Got succesfult GRPC response. 2026-02-19T08:17:16.104889Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [SendMessageBatch] requestId [7fa088ec-eda4963a-691166c2-d79268a8] reply ok 2026-02-19T08:17:16.105052Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1727: http request [SendMessageBatch] requestId [7fa088ec-eda4963a-691166c2-d79268a8] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 619 SourceAddress: 5860:aa7a:e37b:0:4060:aa7a:e37b:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2026-02-19T08:17:16.105361Z node 7 :SQS TRACE: executor.cpp:256: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2026-02-19T08:17:16.105392Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 6ms 2026-02-19T08:17:16.105449Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:52520) <- (200 , 465 bytes) 2026-02-19T08:17:16.105553Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:52520) connection closed 2026-02-19T08:17:16.109948Z node 7 :SQS DEBUG: queue_leader.cpp:464: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2026-02-19T08:17:16.109978Z node 7 :SQS DEBUG: queue_leader.cpp:514: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2026-02-19T08:17:16.110087Z node 7 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). 
Mode: COMPILE_AND_EXEC 2026-02-19T08:17:16.110211Z node 7 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2026-02-19T08:17:16.110642Z node 7 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TPartitionTests::FailedTxsDontBlock [GOOD] >> TPartitionTests::NonConflictingCommitsBatch >> TSentinelTests::PDiskUnknownState [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelTests::BSControllerUnresponsive >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskUnknownState [GOOD] Test command err: 2026-02-19T08:17:15.284764Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-02-19T08:17:15.284826Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-02-19T08:17:15.284899Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:15.284925Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-02-19T08:17:15.284970Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-02-19T08:17:15.285074Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-02-19T08:17:15.285944Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: 
DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-02-19T08:17:15.291195Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: 
"/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 
FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 
FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 00 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.799533Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved16 } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.799578Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 16 2026-02-19T08:17:17.799722Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.799786Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:17.800301Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 10:40, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2026-02-19T08:17:17.800389Z node 9 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-02-19T08:17:17.800676Z node 9 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-02-19T08:17:17.800726Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:40 2026-02-19T08:17:17.800823Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:17.800855Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:17:17.800994Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 
2026-02-19T08:17:17.801048Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-02-19T08:17:17.801079Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-02-19T08:17:17.801113Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-02-19T08:17:17.801140Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-02-19T08:17:17.801170Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-02-19T08:17:17.801213Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-02-19T08:17:17.801258Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-02-19T08:17:17.801505Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.802252Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved16 } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.802303Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 16 2026-02-19T08:17:17.802563Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved17 } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } 
PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.802610Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 17 2026-02-19T08:17:17.802774Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.802918Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.803087Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.803228Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved15 } ResponseTime: 
120110 2026-02-19T08:17:17.803278Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2026-02-19T08:17:17.803419Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-02-19T08:17:17.803491Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:17.803991Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 11:45, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2026-02-19T08:17:17.804072Z node 9 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-02-19T08:17:17.804394Z node 9 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2026-02-19T08:17:17.804451Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 11:45 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> TestKinesisHttpProxy::TestWrongRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:119:2057] recipient: [1:113:2143] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:119:2057] recipient: [1:113:2143] Leader for TabletID 9437184 is [1:135:2156] sender: [1:137:2057] recipient: [1:113:2143] 2026-02-19T08:16:37.056277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:16:37.056341Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:37.062088Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:16:37.087049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:16:37.087470Z node 1 :TX_DATASHARD 
INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:16:37.087792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:16:37.148067Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:16:37.199974Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:16:37.200197Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:16:37.202224Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:16:37.202316Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:16:37.202375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:16:37.202829Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:16:37.203037Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:16:37.203139Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:208:2156] in generation 2 Leader for TabletID 9437184 is [1:135:2156] sender: [1:216:2057] recipient: [1:14:2061] 2026-02-19T08:16:37.290529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:16:37.371444Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:16:37.371701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:16:37.371833Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:222:2217] 2026-02-19T08:16:37.371870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:16:37.371903Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:16:37.371934Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:16:37.372166Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:16:37.372217Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:16:37.372498Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:16:37.372619Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:16:37.372688Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:16:37.372726Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:16:37.372772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:16:37.372820Z node 1 :TX_DATASHARD TRACE: 
plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:16:37.372855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:16:37.372889Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:16:37.372926Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:16:37.373054Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:217:2214], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:37.373104Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:37.373146Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:215:2213], serverId# [1:217:2214], sessionId# [0:0:0] 2026-02-19T08:16:37.380423Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:16:37.380512Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:16:37.380632Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:16:37.380826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:16:37.380879Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:16:37.380948Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:16:37.380994Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:16:37.381052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:16:37.381092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:16:37.381124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:16:37.381462Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:16:37.381494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:16:37.381528Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:16:37.381585Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:16:37.381643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is 
DelayComplete 2026-02-19T08:16:37.381667Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:16:37.381699Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:16:37.381826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:16:37.381861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:16:37.401445Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:16:37.401554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:16:37.401603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:16:37.401659Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:16:37.401778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:16:37.402332Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:228:2223], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:37.402414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:37.402460Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:227:2222], serverId# [1:228:2223], sessionId# [0:0:0] 2026-02-19T08:16:37.402614Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-02-19T08:16:37.402671Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:16:37.402871Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-02-19T08:16:37.402922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-02-19T08:16:37.402979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:16:37.403025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:16:37.416904Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:16:37.417010Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:16:37.417305Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:16:37.417350Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: 
StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:16:37.417416Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:16:37.417482Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:16:37.417541Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... d_impl.h:3216: StateWork, received event# 269877761, Sender [24:299:2279], Recipient [24:240:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:17:19.345978Z node 24 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:17:19.346030Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [24:298:2278], serverId# [24:299:2279], sessionId# [0:0:0] 2026-02-19T08:17:19.346285Z node 24 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [24:104:2137], Recipient [24:240:2231]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 104 RawX2: 103079217241 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? 
?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2026-02-19T08:17:19.346325Z node 24 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:17:19.346427Z node 24 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:17:19.347482Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2026-02-19T08:17:19.347562Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-02-19T08:17:19.347603Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2026-02-19T08:17:19.347644Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2026-02-19T08:17:19.347683Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2026-02-19T08:17:19.347729Z node 24 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:17:19.347803Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2026-02-19T08:17:19.347849Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-02-19T08:17:19.347875Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2026-02-19T08:17:19.347897Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 2026-02-19T08:17:19.347923Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BlockFailPoint 2026-02-19T08:17:19.347947Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-02-19T08:17:19.347972Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2026-02-19T08:17:19.347996Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2026-02-19T08:17:19.348020Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-02-19T08:17:19.348062Z node 24 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:17:19.348112Z node 24 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:2] at 9437184 requested 132390 more memory 2026-02-19T08:17:19.348159Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-02-19T08:17:19.348454Z node 24 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:17:19.348513Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-02-19T08:17:19.348562Z node 24 
:TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:17:19.349643Z node 24 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 132518 and requests 1060144 more for the next try 2026-02-19T08:17:19.349850Z node 24 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 2 released its data 2026-02-19T08:17:19.349909Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-02-19T08:17:19.350135Z node 24 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:17:19.350169Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-02-19T08:17:19.350982Z node 24 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 2 at 9437184 restored its data 2026-02-19T08:17:19.351042Z node 24 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:17:19.351554Z node 24 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 1192662 and requests 9541296 more for the next try 2026-02-19T08:17:19.351660Z node 24 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 2 released its data 2026-02-19T08:17:19.351699Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-02-19T08:17:19.351878Z node 24 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:17:19.351911Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-02-19T08:17:19.352464Z node 24 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 2 at 9437184 restored its data 2026-02-19T08:17:19.352505Z node 24 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:17:19.352980Z node 24 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 10733958 and requests 85871664 more for the next try 2026-02-19T08:17:19.353067Z node 24 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 2 released its data 2026-02-19T08:17:19.353102Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-02-19T08:17:19.353252Z node 24 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:17:19.353283Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-02-19T08:17:19.354129Z node 24 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 2 at 9437184 restored its data 2026-02-19T08:17:19.354185Z node 24 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:17:19.712862Z node 24 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed 
operation [0:2] at tablet 9437184 with status COMPLETE 2026-02-19T08:17:19.712990Z node 24 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:17:19.713087Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:17:19.713128Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2026-02-19T08:17:19.713184Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2026-02-19T08:17:19.713225Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2026-02-19T08:17:19.713358Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:17:19.713394Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-02-19T08:17:19.713438Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-02-19T08:17:19.713478Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-02-19T08:17:19.713539Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-02-19T08:17:19.713570Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-02-19T08:17:19.713611Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-02-19T08:17:19.730996Z node 24 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:17:19.731090Z node 24 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-02-19T08:17:19.731153Z node 24 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-02-19T08:17:19.731284Z node 24 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:17:19.732502Z node 24 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [24:304:2284], Recipient [24:240:2231]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:17:19.732573Z node 24 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:17:19.732630Z node 24 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [24:303:2283], serverId# [24:304:2284], sessionId# [0:0:0] 2026-02-19T08:17:19.732837Z node 24 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 268830214, Sender [24:302:2282], Recipient [24:240:2231]: NKikimrTabletBase.TEvGetCounters >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] >> TPartitionTests::NonConflictingCommitsBatch 
[GOOD] >> TPartitionTests::GetUsedStorage >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] Test command err: 2026-02-19T08:17:15.090111Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-02-19T08:17:15.090166Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-02-19T08:17:15.090227Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:15.090253Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-02-19T08:17:15.090299Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-02-19T08:17:15.090396Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-02-19T08:17:15.091336Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location 
{ Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-02-19T08:17:15.096672Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } 
VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 
3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-02-19T08:17:19.640213Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-02-19T08:17:19.640329Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-02-19T08:17:19.640379Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:19.657890Z node 9 :CMS 
DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:19.657959Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:17:19.658078Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-02-19T08:17:19.658122Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-02-19T08:17:19.658152Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-02-19T08:17:19.658182Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-02-19T08:17:19.658209Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-02-19T08:17:19.658250Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-02-19T08:17:19.658283Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-02-19T08:17:19.658326Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-02-19T08:17:19.658582Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-02-19T08:17:19.659324Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-02-19T08:17:19.659529Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 
TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-02-19T08:17:19.659666Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-02-19T08:17:19.659823Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-02-19T08:17:19.659944Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-02-19T08:17:19.660075Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 
AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-02-19T08:17:19.660212Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-02-19T08:17:19.672311Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:19.673182Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 14:56, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:19.673469Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 14:58, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:19.673517Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 14:57, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:19.673558Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 14:59, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:19.673644Z node 9 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 4 2026-02-19T08:17:19.674566Z node 9 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2026-02-19T08:17:19.674846Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:56 2026-02-19T08:17:19.675005Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:57 2026-02-19T08:17:19.675048Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:58 2026-02-19T08:17:19.675081Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:59 >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2026-02-19T08:16:14.825880Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487211213078703:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:14.826644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:15.079069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:15.117821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:15.117897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:15.213068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:15.258178Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:15.260528Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487211213078658:2081] 1771488974821018 != 1771488974821021 2026-02-19T08:16:15.352776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:15.431327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:15.431351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:15.431358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:15.431474Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:15.704461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:15.711815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:16:15.838511Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:15.975021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:15.994139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:16:15.996251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:16.109378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.288768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.375673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.439477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-02-19T08:16:16.444609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.486648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.524033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.570104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.607834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.641457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.680957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:18.928429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487228392949499:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.928553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.928987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487228392949511:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.929044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487228392949512:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.929177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.934433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:18.954609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487228392949515:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:19.032508Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487232687916862:3100] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:19.474657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.518996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.573235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.621227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... 3d0] Starting executor actor for query(idx=CHANGE_VISIBILITY_ID). 
Mode: COMPILE_AND_EXEC 2026-02-19T08:17:18.479404Z node 7 :SQS TRACE: executor.cpp:154: Request [9166938a-dee59243-18bda4dc-a22f3d0] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 1, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 5923258363543965525, "NOW": 1771489038478, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1771489038308, "Offset": 1, "NewVisibilityDeadline": 1771489039478}, {"LockTimestamp": 1771489038358, "Offset": 2, "NewVisibilityDeadline": 1771489040478}]} 2026-02-19T08:17:18.479897Z node 7 :SQS TRACE: executor.cpp:203: Request [9166938a-dee59243-18bda4dc-a22f3d0] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t \000\001\205\000\000\000\000\001<\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230\035g\003?\004\237\035e>\035g\003?\006\002\007\005?\010\003?\002\237\005d>\035g\003?\004\237\027f>\035g\003?\006\004\007\003\003?\014\237#d>\035g\003?\016\004\003?\020\177UO\346O\031\2443R\003?\022\177w\247k\263\235 {\362\003?\024\002\177/" } FlatMKQL: true } UserRequestId: "9166938a-dee59243-18bda4dc-a22f3d0" } ExecTimeoutPeriod: 60000 }. 
Params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 1, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 5923258363543965525, "NOW": 1771489038478, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1771489038308, "Offset": 1, "NewVisibilityDeadline": 1771489039478}, {"LockTimestamp": 1771489038358, "Offset": 2, "NewVisibilityDeadline": 1771489040478}]} 2026-02-19T08:17:18.493014Z node 7 :SQS TRACE: executor.cpp:286: Request [9166938a-dee59243-18bda4dc-a22f3d0] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976710716 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "result" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ChangeCond" Type { Kind: Data Data { Scheme: 6 } } } Member { Name: "CurrentVisibilityDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Exists" Type { Kind: Data Data { Scheme: 6 } } } Member { Name: "NewVisibilityDeadline" Type { Kind: Data Data { Scheme: 4 } } } Member { Name: "Offset" Type { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Value { Struct { Optional { List { Struct { Bool: true } Struct { Optional { Uint64: 1771489039453 } } Struct { Bool: true } Struct { Uint64: 1771489039478 } Struct { Uint64: 1 } } List { Struct { Bool: true } Struct { Optional { Uint64: 1771489068358 } } Struct { Bool: true } Struct { Uint64: 1771489040478 } Struct { Uint64: 2 } } } } } } } 2026-02-19T08:17:18.493056Z node 7 :SQS DEBUG: executor.cpp:287: Request [9166938a-dee59243-18bda4dc-a22f3d0] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 10ms 2026-02-19T08:17:18.493348Z node 7 :SQS TRACE: executor.cpp:325: Request [9166938a-dee59243-18bda4dc-a22f3d0] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710716 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "result" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ChangeCond" Type { Kind: Data Data { Scheme: 6 } } } Member { Name: "CurrentVisibilityDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Exists" Type { Kind: Data Data { Scheme: 6 } } } Member { Name: "NewVisibilityDeadline" Type { Kind: Data Data { Scheme: 4 } } } Member { Name: "Offset" Type { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Value { Struct { Optional { List { Struct { Bool: true } Struct { Optional { Uint64: 1771489039453 } } Struct { Bool: true } Struct { Uint64: 1771489039478 } Struct { Uint64: 1 } } List { Struct { Bool: true } Struct { Optional { Uint64: 1771489068358 } } Struct { Bool: true } Struct { Uint64: 1771489040478 } Struct { Uint64: 2 } } } } } } } 2026-02-19T08:17:18.493440Z node 7 :SQS TRACE: executor.cpp:327: Request [9166938a-dee59243-18bda4dc-a22f3d0] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"result": [{"ChangeCond": true, "CurrentVisibilityDeadline": 1771489039453, "Exists": true, "NewVisibilityDeadline": 1771489039478, "Offset": 1}, {"ChangeCond": true, "CurrentVisibilityDeadline": 1771489068358, "Exists": true, 
"NewVisibilityDeadline": 1771489040478, "Offset": 2}]} 2026-02-19T08:17:18.493538Z node 7 :SQS DEBUG: executor.cpp:401: Request [9166938a-dee59243-18bda4dc-a22f3d0] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] execution duration: 11ms 2026-02-19T08:17:18.493725Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [9166938a-dee59243-18bda4dc-a22f3d0] Sending executed reply 2026-02-19T08:17:18.493882Z node 7 :SQS TRACE: queue_leader.cpp:1436: Request [9166938a-dee59243-18bda4dc-a22f3d0] Received reply from DB: ExecComplete 2026-02-19T08:17:18.493927Z node 7 :SQS TRACE: queue_leader.cpp:2436: Decrement active message requests for [[cloud4/000000000000000101v0/1]. ActiveMessageRequests: 0 2026-02-19T08:17:18.494046Z node 7 :SQS TRACE: action.h:262: Request [9166938a-dee59243-18bda4dc-a22f3d0] SendReplyAndDie from action actor { ChangeMessageVisibilityBatch { RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } } 2026-02-19T08:17:18.494172Z node 7 :SQS TRACE: proxy_service.h:35: Request [9166938a-dee59243-18bda4dc-a22f3d0] Sending sqs response: { ChangeMessageVisibilityBatch { RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-02-19T08:17:18.494280Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request ChangeMessageVisibilityBatch working duration: 11ms 2026-02-19T08:17:18.494355Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ChangeMessageVisibilityBatch { RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-02-19T08:17:18.494420Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7608487487677245987:2557]: ChangeMessageVisibilityBatch { RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-02-19T08:17:18.494478Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7608487487677245988:3951]. Found: 1 2026-02-19T08:17:18.494642Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [9166938a-dee59243-18bda4dc-a22f3d0] HandleResponse: { ChangeMessageVisibilityBatch { RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2026-02-19T08:17:18.494716Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [9166938a-dee59243-18bda4dc-a22f3d0] Sending reply from proxy actor: { ChangeMessageVisibilityBatch { RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "9166938a-dee59243-18bda4dc-a22f3d0" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-02-19T08:17:18.495197Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:431: http request [ChangeMessageVisibilityBatch] requestId [9166938a-dee59243-18bda4dc-a22f3d0] Got succesfult GRPC response. 
2026-02-19T08:17:18.495296Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [ChangeMessageVisibilityBatch] requestId [9166938a-dee59243-18bda4dc-a22f3d0] reply ok 2026-02-19T08:17:18.495395Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1727: http request [ChangeMessageVisibilityBatch] requestId [9166938a-dee59243-18bda4dc-a22f3d0] Send metering event. HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 788 ResponseSizeInBytes: 196 SourceAddress: 182c:7d81:2f7c:0:2c:7d81:2f7c:0 ResourceId: 000000000000000101v0 Action: ChangeMessageVisibilityBatch 2026-02-19T08:17:18.495518Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#41,[::1]:45114) <- (200 , 44 bytes) 2026-02-19T08:17:18.495608Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#41,[::1]:45114) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |91.3%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> TPartitionTests::GetUsedStorage [GOOD] >> TPartitionTests::EndWriteTimestamp_HeadKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2026-02-19T08:16:17.191488Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487222229744668:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:17.191571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:17.629336Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:17.657533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:17.657612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:17.756124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:17.846812Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:17.870421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:18.105309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:18.105342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:18.105350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:18.105440Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:18.185820Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:18.453558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:18.462927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:16:18.694184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:18.701900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:16:18.707886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:18.742441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-02-19T08:16:18.838398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.026052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.155131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.216858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.259458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.303837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.346467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.392136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.437298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.492697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:21.378722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487239409615273:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.378846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487239409615265:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.378982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.380120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487239409615280:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.380336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.382670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:21.399469Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487239409615279:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-02-19T08:16:21.478223Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487239409615332:3106] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:21.884869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:21.930442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:22.013095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:22.051636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:22.093903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... \203\005@\002\006\203\005@\n\016\006\000?\264\003?\266\024FROM_QUEUE\003\022\000\000\013?j\003?l\000\t\351\000?n\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\320\003?\322\024BATCH_SIZE\003\022\000\003?p\000\003?r\000\006\004?v\003\203\014\000\003\203\014\000\003\003?x\000\277\007\003?D\000\002\001\000\006\002?\002\t\211\004\202\203\005@?@J\000\003?\372\022truncated\t\211\004?@?B\203\004F\000?\360\003?\001\002\002\002\001\000\003/" } Params { Bin: "\037\000\005\205\006\203\010\203\001H\203\001H(BATCH_SIZE(FROM_QUEUE$FROM_USER\003?\000\241\017\003?\002\000\003?\004\000\007/" } FlatMKQL: true } } ExecTimeoutPeriod: 60000 }. 
Params: {"FROM_USER": "", "FROM_QUEUE": "", "BATCH_SIZE": 1000} 2026-02-19T08:17:20.407864Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:20.407901Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 42ms 2026-02-19T08:17:20.408346Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { 
Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:20.408383Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-02-19T08:17:20.408504Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 42ms 2026-02-19T08:17:20.409056Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:20.646713Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7608487495978217138:2446]: Pool not found 2026-02-19T08:17:20.647673Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-02-19T08:17:20.929942Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7608487495978217134:2443]: Pool not found 2026-02-19T08:17:20.930697Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-02-19T08:17:20.937292Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608487495978217260:2464], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:20.937308Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7608487495978217261:2465], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:20.937486Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:20.942057Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608487495978217265:2467], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:20.942187Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:21.230813Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7608487495978217258:2463]: Pool not found 2026-02-19T08:17:21.231521Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-02-19T08:17:21.286638Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:48072) incoming connection opened 2026-02-19T08:17:21.286711Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:48072) -> (POST /, 87 bytes) 2026-02-19T08:17:21.286891Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b873:eff4:1c7c:0:a073:eff4:1c7c:0] request [CreateStream] url [/] database [] requestId: e8b2bac8-f0315d54-b4042cde-d1afa9dd 2026-02-19T08:17:21.287416Z node 8 :HTTP_PROXY WARN: http_req.cpp:1011: http request [CreateStream] requestId [e8b2bac8-f0315d54-b4042cde-d1afa9dd] got new request with incorrect json from [b873:eff4:1c7c:0:a073:eff4:1c7c:0] database '' 2026-02-19T08:17:21.287616Z node 8 :HTTP_PROXY INFO: http_req.cpp:878: Request [e8b2bac8-f0315d54-b4042cde-d1afa9dd] Request done. Api [Kinesis] Action [CreateStream] User [AKIDEXAMPLE] IP [b873:eff4:1c7c:0:a073:eff4:1c7c:0] Duration [0ms] Code [400] Response [ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName] 2026-02-19T08:17:21.287644Z node 8 :HTTP_PROXY INFO: http_req.cpp:1671: http request [CreateStream] requestId [e8b2bac8-f0315d54-b4042cde-d1afa9dd] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2026-02-19T08:17:21.287972Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:48072) <- (400 InvalidArgumentException, 135 bytes) 2026-02-19T08:17:21.288045Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:48072) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 2026-02-19T08:17:21.288084Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:48072) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: e8b2bac8-f0315d54-b4042cde-d1afa9dd Content-Type: application/x-amz-json-1.1 Content-Length: 135 2026-02-19T08:17:21.288165Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:48072) connection closed Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} >> TColumnShardTestSchema::RebootHotTiers [GOOD] |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |91.3%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=171489583.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489583.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489583.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=171489583.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489583.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489583.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=171489583.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488383.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151489583.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=151489583.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488383.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151488383.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=151488383.000000s;Name=;Codec=}; 2026-02-19T08:16:24.151137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:24.179287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:24.179558Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:24.186616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:24.186793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:24.186968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:24.187068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:24.187164Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:24.187265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:24.187381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:24.187512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:24.187634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:24.187705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:24.187774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:24.187849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:24.187933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:24.210989Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:24.211141Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:24.211204Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:24.211338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:24.211462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:24.211534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:24.211575Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:24.211638Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:24.211679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:24.211704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:24.211723Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:24.211826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:24.211884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:24.211917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:24.211943Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:24.212038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:24.212079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:24.212106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:24.212124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:24.212158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:24.212187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:24.212212Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:24.212250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:24.212292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:24.212322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:24.212459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:24.212540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:24.212581Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:24.212671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:24.212701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:1 ... DataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489032480:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:17:22.014461Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489032480:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:17:22.015047Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489032480:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-02-19T08:17:22.016815Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489032480:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-02-19T08:17:22.017876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489032480:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:217;event=TTxScan started;actor_id=[1:1450:3394];trace_detailed=; 2026-02-19T08:17:22.018462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-02-19T08:17:22.018703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-02-19T08:17:22.019007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:22.019227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:22.019582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:17:22.019754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:22.019905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:22.020154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:1450:3394] finished for tablet 9437184 2026-02-19T08:17:22.020663Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:1449:3393];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":61994258,"name":"_full_task","f":61994258,"d_finished":0,"c":0,"l":61996691,"d":2433},"events":[{"name":"bootstrap","f":61994598,"d_finished":1132,"c":1,"l":61995730,"d":1132},{"a":61996026,"name":"ack","f":61996026,"d_finished":0,"c":0,"l":61996691,"d":665},{"a":61996006,"name":"processing","f":61996006,"d_finished":0,"c":0,"l":61996691,"d":685},{"name":"ProduceResults","f":61995332,"d_finished":711,"c":2,"l":61996397,"d":711},{"a":61996402,"name":"Finish","f":61996402,"d_finished":0,"c":0,"l":61996691,"d":289}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:22.020752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:1449:3393];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:17:22.021275Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:1449:3393];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":61994258,"name":"_full_task","f":61994258,"d_finished":0,"c":0,"l":61997275,"d":3017},"events":[{"name":"bootstrap","f":61994598,"d_finished":1132,"c":1,"l":61995730,"d":1132},{"a":61996026,"name":"ack","f":61996026,"d_finished":0,"c":0,"l":61997275,"d":1249},{"a":61996006,"name":"processing","f":61996006,"d_finished":0,"c":0,"l":61997275,"d":1269},{"name":"ProduceResults","f":61995332,"d_finished":711,"c":2,"l":61996397,"d":711},{"a":61996402,"name":"Finish","f":61996402,"d_finished":0,"c":0,"l":61997275,"d":873}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; Got TEvKqpCompute::TEvScanData [1:1450:3394]->[1:1449:3393] 2026-02-19T08:17:22.021389Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:17:22.016786Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-02-19T08:17:22.021441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:17:22.021586Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1450:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 >> ExternalBlobsMultipleChannels::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2026-02-19T08:17:09.107018Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.218874Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:09.218991Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:09.219048Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:09.219113Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:17:09.240189Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:184:2196] 2026-02-19T08:17:09.242232Z node 1 :PERSQUEUE INFO: partition_init.cpp:1053: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-02-19T08:17:09.000000Z 2026-02-19T08:17:09.242541Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:184:2196] 2026-02-19T08:17:09.253328Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.284714Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.305896Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.337568Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.368896Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.390019Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.442372Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.516285Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.008958Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.069330Z node 2 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:10.069400Z node 2 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:10.069473Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:10.069536Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:17:10.090335Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:181:2194] 
2026-02-19T08:17:10.092056Z node 2 :PERSQUEUE INFO: partition_init.cpp:1053: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-02-19T08:17:10.000000Z 2026-02-19T08:17:10.092300Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:181:2194] 2026-02-19T08:17:10.116507Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.158069Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.178942Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.192792Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.234011Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.275413Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.296348Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\336\345\247\30738\001" StorageChannel: INLINE } 2026-02-19T08:17:10.451639Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.472627Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 
0 allowFollowers: 0 2026-02-19T08:17:10.484016Z node 2 :PERSQUEUE WARN: partition.cpp:3045: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (invalid range) Begin 4 End 2 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\336\345\247\30738\001" StorageChannel: INLINE } 2026-02-19T08:17:10.509478Z node 2 :PERSQUEUE WARN: partition.cpp:3055: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 0 Begin 2 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\336\345\247\30738\001" StorageChannel: INLINE } 2026-02-19T08:17:10.530892Z node 2 :PERSQUEUE WARN: partition.cpp:3065: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (behind the last offset) EndOffset 10 End 11 2026-02-19T08:17:10.846226Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 3 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.889773Z node 3 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:10.889830Z node 3 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:10.889868Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:10.889911Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:17:10.906101Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:17:10.906355Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:17:10.906937Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [3:181:2193] 2026-02-19T08:17:10.907918Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:17:10.907995Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2026-02-19T08:17:10.908048Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [3:181:2193] 2026-02-19T08:17:10.908111Z node 3 :PERSQUEUE DEBUG: partition.cpp:7 ... 
e] TPartition::CommitWriteOperations TxId: 4 2026-02-19T08:17:22.160845Z node 6 :PERSQUEUE DEBUG: partition.cpp:3984: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 4 2026-02-19T08:17:22.160876Z node 6 :PERSQUEUE DEBUG: partition.cpp:2502: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:22.160917Z node 6 :PERSQUEUE DEBUG: partition.cpp:3914: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2026-02-19T08:17:22.160949Z node 6 :PERSQUEUE DEBUG: partition.cpp:2509: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-02-19T08:17:22.160990Z node 6 :PERSQUEUE DEBUG: partition.cpp:3975: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2026-02-19T08:17:22.161043Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 6 2026-02-19T08:17:22.161079Z node 6 :PERSQUEUE DEBUG: partition.cpp:2371: [72057594037927937][Partition][0][StateIdle] Batch completed (6) 2026-02-19T08:17:22.161141Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][0][StateIdle] Try persist 2026-02-19T08:17:22.161419Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 6 Got KV request Got KV request Wait tx rollback for tx 0 2026-02-19T08:17:22.161722Z node 6 :PERSQUEUE DEBUG: partition.cpp:2182: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:17:22.186270Z node 6 :PERSQUEUE DEBUG: partition.cpp:2334: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-02-19T08:17:22.186378Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2026-02-19T08:17:22.186594Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:17:22.186658Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:22.186706Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.186773Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.186815Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.186857Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][0][StateIdle] Try persist 2026-02-19T08:17:22.186913Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait tx committed for tx 3 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2026-02-19T08:17:22.874146Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.021082Z node 7 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:23.021166Z node 7 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:23.021221Z node 7 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:23.021285Z node 7 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:17:23.085605Z node 7 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [7:182:2194] 2026-02-19T08:17:23.087732Z node 7 :PERSQUEUE INFO: partition_init.cpp:1053: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-02-19T08:17:23.000000Z 2026-02-19T08:17:23.087818Z node 7 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [7:182:2194] 2026-02-19T08:17:23.098639Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.134022Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.158049Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.194042Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.230057Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.266045Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.357400Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.393219Z node 7 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 7 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:23.918287Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:24.039112Z node 8 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:24.039184Z node 8 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:24.039235Z node 8 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:24.039291Z node 8 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:17:24.095802Z node 8 :PERSQUEUE INFO: partition_init.cpp:1175: 
[72057594037927937][Partition][2][StateInit] bootstrapping 2 [8:184:2196] >>>> ADD BLOB 0 writeTimestamp=2026-02-19T08:17:24.087940Z >>>> ADD BLOB 1 writeTimestamp=2026-02-19T08:17:24.087968Z >>>> ADD BLOB 2 writeTimestamp=2026-02-19T08:17:24.087989Z >>>> ADD BLOB 3 writeTimestamp=2026-02-19T08:17:24.088006Z >>>> ADD BLOB 4 writeTimestamp=2026-02-19T08:17:24.088023Z >>>> ADD BLOB 5 writeTimestamp=2026-02-19T08:17:24.088041Z >>>> ADD BLOB 6 writeTimestamp=2026-02-19T08:17:24.088057Z >>>> ADD BLOB 7 writeTimestamp=2026-02-19T08:17:24.088072Z >>>> ADD BLOB 8 writeTimestamp=2026-02-19T08:17:24.088087Z >>>> ADD BLOB 9 writeTimestamp=2026-02-19T08:17:24.088105Z 2026-02-19T08:17:24.099613Z node 8 :PERSQUEUE INFO: partition_init.cpp:1053: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-02-19T08:17:24.000000Z 2026-02-19T08:17:24.099905Z node 8 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [8:184:2196] 2026-02-19T08:17:24.111552Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:24.144200Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:24.174009Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:24.210077Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:24.245967Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:24.273968Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:24.333954Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:24.417950Z node 8 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 8 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> 
TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489576.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489576.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=171489576.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489576.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489576.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=171489576.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488376.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151489576.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=151489576.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488376.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151488376.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=151488376.000000s;Name=;Codec=}; 2026-02-19T08:16:17.555237Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:17.594425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:17.594788Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:17.602900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:17.603216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:17.603486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:17.603608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:17.603715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:17.603839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:17.603964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:17.604102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:17.604233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:17.604369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:17.604484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:17.604584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:17.604682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:17.634161Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:17.634378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:17.634459Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:17.634686Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:17.634886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:17.634984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:17.635038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:17.635148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:17.635215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:17.635265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:17.635300Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:17.635486Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:17.635565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:17.635613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:17.635654Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:17.635776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:17.635860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:17.635914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:17.635951Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:17.636009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:17.636053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:17.636086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:17.636132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:17.636180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:17.636211Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:17.636440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:17.636514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:17.636573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:17.636719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:17.636769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:17.636 ... 26-02-19T08:17:23.250941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=109; 2026-02-19T08:17:23.250989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=16938; 2026-02-19T08:17:23.251036Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=17071; 2026-02-19T08:17:23.251103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-02-19T08:17:23.251189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=40; 2026-02-19T08:17:23.251228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=21863; 2026-02-19T08:17:23.251375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=95; 2026-02-19T08:17:23.251489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=68; 2026-02-19T08:17:23.251615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=81; 2026-02-19T08:17:23.251723Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=69; 2026-02-19T08:17:23.270107Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=18299; 2026-02-19T08:17:23.275754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5495; 2026-02-19T08:17:23.275879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-02-19T08:17:23.275933Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-02-19T08:17:23.275978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-02-19T08:17:23.276048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2026-02-19T08:17:23.276088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-02-19T08:17:23.276176Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2026-02-19T08:17:23.276217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-02-19T08:17:23.276280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2026-02-19T08:17:23.276366Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=51; 2026-02-19T08:17:23.276684Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=282; 
2026-02-19T08:17:23.276727Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=64440; 2026-02-19T08:17:23.276856Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-02-19T08:17:23.276975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-02-19T08:17:23.277031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-02-19T08:17:23.277094Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];process=SwitchToWork;fline=columnshard_impl.cpp:1541;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-02-19T08:17:23.341655Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];process=SwitchToWork;fline=column_engine_logs.cpp:684;event=OnTieringModified;new_count_tierings=1; 2026-02-19T08:17:23.357984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:17:23.358118Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=4; 2026-02-19T08:17:23.358190Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488727384;tx_id=18446744073709551615;;current_snapshot_ts=1771488978863; 2026-02-19T08:17:23.358241Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:17:23.358292Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:23.358333Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:23.358430Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:17:23.358658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:507;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.027000s; 2026-02-19T08:17:23.363955Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-02-19T08:17:23.364271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-02-19T08:17:23.364328Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:459;event=EnqueueBackgroundActivities;periodic=0; 2026-02-19T08:17:23.364438Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:434;event=StartCleanup;portions_count=4; 2026-02-19T08:17:23.364506Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:447;event=StartCleanupStop;snapshot=plan_step=1771488727384;tx_id=18446744073709551615;;current_snapshot_ts=1771488978863; 2026-02-19T08:17:23.364555Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:521;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-02-19T08:17:23.364602Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:816;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:23.364645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:848;background=cleanup;skip_reason=no_changes; 2026-02-19T08:17:23.364773Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;background=ttl;skip_reason=no_changes; 2026-02-19T08:17:23.365214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:517;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.174000s; 2026-02-19T08:17:23.365252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:449;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2026-02-19T08:16:14.966109Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487209178722784:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:14.966161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:15.024844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:16:15.310115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:15.323932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:15.324036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:15.367402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:15.507084Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:15.509443Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487209178722753:2081] 1771488974964451 != 1771488974964454 2026-02-19T08:16:15.547533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:15.645569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:15.645598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:15.645604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:15.645699Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:15.994427Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:16.057141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:16.403513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:16.409647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 
2026-02-19T08:16:16.411611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:16.541375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.714543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.766145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-02-19T08:16:16.773059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.839594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-02-19T08:16:16.852940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.897487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.937623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.989630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.037173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.088996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:17.201702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.228892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487230653560896:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.228892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487230653560908:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.229030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.229561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487230653560911:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.229628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:19.233091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:19.249787Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487230653560910:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:19.336879Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487230653560963:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:19.721154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.779256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.827578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.868298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose ... nceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CIq25qfHMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2026-02-19T08:17:21.163408Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037907] server disconnected, pipe [8:7608487497313607871:2489] destroyed 2026-02-19T08:17:21.163448Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037911] server disconnected, pipe [8:7608487497313607872:2490] destroyed 2026-02-19T08:17:21.164631Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:50444) incoming connection opened 2026-02-19T08:17:21.164724Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:50444) -> (POST /Root, 157 bytes) 2026-02-19T08:17:21.164911Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [3896:ffa:2e7c:0:2096:ffa:2e7c:0] request [ListShards] url [/Root] database [/Root] requestId: 27a7ef2d-76b64eb2-5bc35138-a1c2b2a 2026-02-19T08:17:21.165330Z node 8 :HTTP_PROXY INFO: http_req.cpp:1022: http request [ListShards] requestId [27a7ef2d-76b64eb2-5bc35138-a1c2b2a] got new request from [3896:ffa:2e7c:0:2096:ffa:2e7c:0] database '/Root' stream 'teststream' 2026-02-19T08:17:21.169862Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ListShards] requestId [27a7ef2d-76b64eb2-5bc35138-a1c2b2a] [auth] Authorized 
successfully 2026-02-19T08:17:21.169991Z node 8 :HTTP_PROXY INFO: http_req.cpp:731: http request [ListShards] requestId [27a7ef2d-76b64eb2-5bc35138-a1c2b2a] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1771489041.170070 274610 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-02-19T08:17:21.178242Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037911] server connected, pipe [8:7608487497313607884:2495], now have 1 active actors on pipe 2026-02-19T08:17:21.178284Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037907] server connected, pipe [8:7608487497313607883:2494], now have 1 active actors on pipe 2026-02-19T08:17:21.179117Z node 8 :HTTP_PROXY INFO: http_req.cpp:878: Request [27a7ef2d-76b64eb2-5bc35138-a1c2b2a] Request done. Api [Kinesis] Action [ListShards] User [AKIDEXAMPLE] Database [/Root] Queue [teststream] IP [3896:ffa:2e7c:0:2096:ffa:2e7c:0] Duration [14ms] Code [200] 2026-02-19T08:17:21.179148Z node 8 :HTTP_PROXY INFO: http_req.cpp:1667: http request [ListShards] requestId [27a7ef2d-76b64eb2-5bc35138-a1c2b2a] reply ok 2026-02-19T08:17:21.179513Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:50444) <- (200 , 449 bytes) 2026-02-19T08:17:21.179615Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:50444) connection closed Http output full {"NextToken":"CJq25qfHMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CJq25qfHMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2026-02-19T08:17:21.180956Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037907] server disconnected, pipe [8:7608487497313607883:2494] destroyed 2026-02-19T08:17:21.181000Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037911] server disconnected, pipe [8:7608487497313607884:2495] destroyed 2026-02-19T08:17:21.181300Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:50460) incoming connection opened 2026-02-19T08:17:21.181388Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:50460) -> (POST /Root, 157 bytes) 2026-02-19T08:17:21.181553Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [18a9:a7f9:2e7c:0:a9:a7f9:2e7c:0] request [ListShards] url [/Root] database [/Root] requestId: 52220229-989528ba-d0aec937-b81b108a 2026-02-19T08:17:21.181985Z node 8 :HTTP_PROXY INFO: http_req.cpp:1022: http request [ListShards] requestId [52220229-989528ba-d0aec937-b81b108a] got new request from [18a9:a7f9:2e7c:0:a9:a7f9:2e7c:0] database '/Root' stream 'teststream' 2026-02-19T08:17:21.182398Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ListShards] requestId 
[52220229-989528ba-d0aec937-b81b108a] [auth] Authorized successfully E0000 00:00:1771489041.182509 274611 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-02-19T08:17:21.182444Z node 8 :HTTP_PROXY INFO: http_req.cpp:731: http request [ListShards] requestId [52220229-989528ba-d0aec937-b81b108a] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:17:21.183434Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037907] server connected, pipe [8:7608487497313607895:2499], now have 1 active actors on pipe 2026-02-19T08:17:21.183513Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037911] server connected, pipe [8:7608487497313607896:2500], now have 1 active actors on pipe 2026-02-19T08:17:21.184171Z node 8 :HTTP_PROXY INFO: http_req.cpp:878: Request [52220229-989528ba-d0aec937-b81b108a] Request done. Api [Kinesis] Action [ListShards] User [AKIDEXAMPLE] Database [/Root] Queue [teststream] IP [18a9:a7f9:2e7c:0:a9:a7f9:2e7c:0] Duration [2ms] Code [200] 2026-02-19T08:17:21.184206Z node 8 :HTTP_PROXY INFO: http_req.cpp:1667: http request [ListShards] requestId [52220229-989528ba-d0aec937-b81b108a] reply ok 2026-02-19T08:17:21.184274Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037907] server disconnected, pipe [8:7608487497313607895:2499] destroyed 2026-02-19T08:17:21.184303Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037911] server disconnected, pipe [8:7608487497313607896:2500] destroyed 2026-02-19T08:17:21.184420Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:50460) <- (200 , 449 bytes) Http output full {"NextToken":"CJ+25qfHMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CJ+25qfHMxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2026-02-19T08:17:21.184502Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:50460) connection closed 2026-02-19T08:17:21.219736Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:17:21.219771Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.219784Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:21.219801Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.219813Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][1][StateIdle] Try persist 2026-02-19T08:17:21.223395Z 
node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:17:21.223429Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.223441Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:21.223458Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.223471Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037908][Partition][2][StateIdle] Try persist 2026-02-19T08:17:21.227301Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:17:21.227332Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.227347Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:21.227367Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.227378Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037910][Partition][4][StateIdle] Try persist 2026-02-19T08:17:21.233967Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:17:21.233996Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.234010Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:21.234028Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.234051Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037909][Partition][3][StateIdle] Try persist 2026-02-19T08:17:21.238048Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:21.238077Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.238088Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:21.238106Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:21.238117Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037911][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2026-02-19T08:16:14.195434Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487211165534984:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:14.195501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:14.516485Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:14.520906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:14.521010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:14.527154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:14.841995Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:14.843632Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487211165534941:2081] 1771488974193478 != 1771488974193481 2026-02-19T08:16:14.845085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:14.845094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:14.845101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:14.845177Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:14.864042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:15.067676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:15.081613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:16:15.213835Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:15.339185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:15.347357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:15.435362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.565772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.617490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.739387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.845207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.910793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.963543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.036205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.099811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.161978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:18.600197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487228345405814:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.600308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487228345405803:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.600470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.604559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487228345405818:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.604667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.606517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:18.621876Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487228345405817:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:18.686347Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487228345405870:3119] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:19.172337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.199217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487211165534984:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:19.199296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:16:19.240602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.294743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.361348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/sc ... 
ng.cpp:392: (#37,[::1]:33344) connection closed 2026-02-19T08:17:22.140175Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:17:22.140212Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.140225Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.140241Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.140265Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037908][Partition][2][StateIdle] Try persist 2026-02-19T08:17:22.153103Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:17:22.153140Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.153153Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.153170Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.153184Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037909][Partition][3][StateIdle] Try persist 2026-02-19T08:17:22.153254Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:17:22.153266Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.153275Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.153286Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.153296Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037910][Partition][4][StateIdle] Try persist 2026-02-19T08:17:22.158319Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:22.158359Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.158370Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.158387Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.158402Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037911][Partition][0][StateIdle] Try persist 2026-02-19T08:17:22.213678Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7608487494678706470:2444]: Pool not found 2026-02-19T08:17:22.213827Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 
2026-02-19T08:17:22.217096Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608487503268641520:2503], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.217173Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7608487503268641521:2504], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:22.217227Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.220189Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608487503268641524:2505], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.220333Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.225292Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:17:22.225334Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.225349Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.225371Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.225390Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][1][StateIdle] Try persist 2026-02-19T08:17:22.240126Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:17:22.240165Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.240177Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.240196Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.240211Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037908][Partition][2][StateIdle] Try persist 2026-02-19T08:17:22.253451Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:17:22.253497Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.253507Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.253525Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.253540Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037909][Partition][3][StateIdle] Try persist 2026-02-19T08:17:22.253592Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:17:22.253602Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.253611Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.253625Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.253633Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037910][Partition][4][StateIdle] Try persist 2026-02-19T08:17:22.261618Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:22.261657Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.261669Z node 8 :PERSQUEUE DEBUG: 
partition.cpp:2366: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.261685Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.261699Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037911][Partition][0][StateIdle] Try persist 2026-02-19T08:17:22.325650Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:17:22.325693Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.325721Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.325741Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.325755Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][1][StateIdle] Try persist 2026-02-19T08:17:22.340485Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:17:22.340514Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.340524Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.340538Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.340549Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037908][Partition][2][StateIdle] Try persist 2026-02-19T08:17:22.354046Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:17:22.354077Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.354090Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.354108Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.354124Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037909][Partition][3][StateIdle] Try persist 2026-02-19T08:17:22.354170Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-02-19T08:17:22.354180Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.354190Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:22.354215Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:22.354224Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037910][Partition][4][StateIdle] Try persist >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction 
[GOOD] >> PersistentBufferSpaceAllocator::OccupyChunkSeedTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:17:11.649921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:17:11.771795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:17:11.789243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:11.789311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:17:11.789767Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:17:12.093695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:12.093855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:12.173328Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489028858028 != 1771489028858032 2026-02-19T08:17:12.186224Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:12.229830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:12.331956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:17:12.644924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:12.657971Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:12.764391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:13.058921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:13.059064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:977:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:13.059155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:13.060344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:982:2831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:13.060676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:13.065250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:13.194070Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:980:2829], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:17:13.286432Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1038:2868] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2026-02-19T08:16:14.306875Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487209440902943:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:14.307089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:14.658782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:14.683651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:14.683755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:14.751710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:14.804910Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:14.820699Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487209440902827:2081] 1771488974277021 != 1771488974277024 2026-02-19T08:16:14.932422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:14.956633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:14.956648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:14.956652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:14.956718Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:15.179203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:15.397880Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:15.473103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:15.478419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:16:15.481943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:15.585988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.775123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.853343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.921890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-02-19T08:16:15.930112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:15.969864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.052155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.097610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.151040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.209614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:16.298697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:18.876724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487226620773671:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.876824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487226620773680:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.876896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.877564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487226620773686:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.877626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:18.881415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:18.893128Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487226620773685:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-02-19T08:16:18.965739Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487226620773738:3105] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:19.307647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487209440902943:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:19.307737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:16:19.399122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.490633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.527914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.564111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propo ... 
203\005@\203\001H?\322\030Invoke\000\003?\326\014Equals\003?\330\000\t\211\004?\322\207\203\001H?\322 Coalesce\000\t\211\004?\342\207\205\004\207\203\001H?\342\026\032\203\004\030Member\000\t\211\n?\354\203\005\004\200\205\004\203\004\203\004\026\032\213\004\203\001H\203\001H\203\004\036\000\003?\362 \000\001\205\000\000\000\000\001\'\000\000\000\000\000\000\000?\352\005?\370\003?\364\004\003?\366 \003\013?\376\t\351\000?\372\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?%\002\003?)\002\022USER_NAME\003\022\000\003?\374(000000000000000301v0\002\003?\001\002\000\037\003?\356\002\002\003?\322\004{}\002\003\003?\302\004{}?a\002\002\002\001\000/" } Params { Bin: "\037\000\005\205\010\203\001H\203\010\203\010\203\001H\020NAME> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::OccupyChunkSeedTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2026-02-19T08:16:17.649949Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487224595110794:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:17.650538Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:17.676486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:16:18.129849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:18.129973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:18.180708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:18.213118Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:18.405368Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:18.409914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487224595110545:2081] 1771488977502778 != 1771488977502781 2026-02-19T08:16:18.458210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:18.562173Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:18.598613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:18.598632Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:18.598639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:18.598735Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:18.940704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:18.947249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:16:19.227182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:16:19.232832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:16:19.235954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:16:19.270782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:16:19.360486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.585022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.637362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.690386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:16:19.777010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.841751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.877817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.928664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:19.991855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:20.032978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:21.635500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487241774981393:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.635639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.636133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487241774981405:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.636185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487241774981406:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.636292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:16:21.640788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:16:21.658452Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487241774981409:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:16:21.756587Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487241774981462:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:16:22.194336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:22.240628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:22.282438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:16:22.327957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose ... List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:24.163270Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608487512548533849:2453], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:24.163564Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:24.165028Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:24.165060Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 40ms 2026-02-19T08:17:24.165483Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: 
"Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:24.165517Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-02-19T08:17:24.165638Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 40ms 2026-02-19T08:17:24.166185Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:24.433014Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7608487512548533791:2448]: Pool not found 2026-02-19T08:17:24.437548Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-02-19T08:17:24.666194Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7608487512548533787:2445]: Pool not found 2026-02-19T08:17:24.666326Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 
2026-02-19T08:17:24.669412Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7608487512548533899:2466], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:24.669412Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608487512548533898:2465], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:24.669482Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:24.669865Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608487512548533902:2467], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:24.669915Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:25.016299Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7608487512548533896:2464]: Pool not found 2026-02-19T08:17:25.017095Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-02-19T08:17:25.106586Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:44956) incoming connection opened 2026-02-19T08:17:25.106698Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:44956) -> (POST /Root, 4 bytes) 2026-02-19T08:17:25.106910Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f899:239d:167c:0:e099:239d:167c:0] request [CreateStream] url [/Root] database [/Root] requestId: 9aa649f8-6aa0ee43-92ab8943-6fe319db 2026-02-19T08:17:25.107535Z node 8 :HTTP_PROXY INFO: http_req.cpp:878: Request [9aa649f8-6aa0ee43-92ab8943-6fe319db] Request done. Api [Kinesis] Action [CreateStream] User [AKIDEXAMPLE] Database [/Root] IP [f899:239d:167c:0:e099:239d:167c:0] Duration [0ms] Code [400] Response [ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map] 2026-02-19T08:17:25.107573Z node 8 :HTTP_PROXY INFO: http_req.cpp:1671: http request [CreateStream] requestId [9aa649f8-6aa0ee43-92ab8943-6fe319db] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2026-02-19T08:17:25.107867Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:44956) <- (400 MissingParameter, 127 bytes) Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2026-02-19T08:17:25.107921Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:44956) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked null 2026-02-19T08:17:25.107969Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:44956) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 9aa649f8-6aa0ee43-92ab8943-6fe319db Content-Type: application/x-amz-json-1.1 Content-Length: 127 2026-02-19T08:17:25.108062Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:44956) connection closed >> PersistentBufferSpaceAllocator::OccupyAllSpaceAndAddNewChunk [GOOD] >> PersistentBufferSpaceAllocator::OccupyDoubleHoleTest [GOOD] >> DDisk::PersistentBufferWithRestarts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489587.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489587.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=171489587.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489587.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=171489587.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=171489587.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488387.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151489587.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=151489587.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488387.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151488387.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=151488387.000000s;Name=;Codec=}; 2026-02-19T08:16:27.608206Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:27.656396Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:27.656690Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:27.668166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:27.668444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:27.668687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:27.668811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:27.668923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:27.669037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:27.669174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:27.669307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:27.669440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:27.669560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:27.669665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:27.669858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:27.669973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:27.698977Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:27.699175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:27.699242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:27.699426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:27.699575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:27.699650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:27.699695Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:27.699793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:27.699849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:27.699891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:27.699923Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:27.700084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:27.700142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:27.700201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:27.700251Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:27.700347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:27.700403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:27.700444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:27.700480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:27.700530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:27.700570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:27.700603Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:27.700652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:27.700694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:27.700741Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:27.700958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:27.701010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:27.701041Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:27.701163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:27.701208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:27.701255Z node 1 :TX_ ... _id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489035931:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:17:26.348813Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489035931:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-02-19T08:17:26.349358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489035931:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-02-19T08:17:26.355856Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489035931:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-02-19T08:17:26.356959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:129:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1771489035931:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:217;event=TTxScan started;actor_id=[1:1455:3399];trace_detailed=; 2026-02-19T08:17:26.357529Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-02-19T08:17:26.360372Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-02-19T08:17:26.360782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:26.361007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:26.361493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:17:26.361679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:26.361856Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:26.362116Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:1455:3399] finished for tablet 9437184 2026-02-19T08:17:26.362655Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:1454:3398];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.004},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.005}],"full":{"a":63158553,"name":"_full_task","f":63158553,"d_finished":0,"c":0,"l":63163881,"d":5328},"events":[{"name":"bootstrap","f":63158927,"d_finished":3810,"c":1,"l":63162737,"d":3810},{"a":63163155,"name":"ack","f":63163155,"d_finished":0,"c":0,"l":63163881,"d":726},{"a":63163133,"name":"processing","f":63163133,"d_finished":0,"c":0,"l":63163881,"d":748},{"name":"ProduceResults","f":63162311,"d_finished":772,"c":2,"l":63163572,"d":772},{"a":63163578,"name":"Finish","f":63163578,"d_finished":0,"c":0,"l":63163881,"d":303}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; 2026-02-19T08:17:26.362742Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:1454:3398];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:17:26.363232Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:1454:3398];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.004},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.005}],"full":{"a":63158553,"name":"_full_task","f":63158553,"d_finished":0,"c":0,"l":63164498,"d":5945},"events":[{"name":"bootstrap","f":63158927,"d_finished":3810,"c":1,"l":63162737,"d":3810},{"a":63163155,"name":"ack","f":63163155,"d_finished":0,"c":0,"l":63164498,"d":1343},{"a":63163133,"name":"processing","f":63163133,"d_finished":0,"c":0,"l":63164498,"d":1365},{"name":"ProduceResults","f":63162311,"d_finished":772,"c":2,"l":63163572,"d":772},{"a":63163578,"name":"Finish","f":63163578,"d_finished":0,"c":0,"l":63164498,"d":920}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:();counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:0ms;WaitDuration:0ms;RawBytesRead:0]) ; Got TEvKqpCompute::TEvScanData [1:1455:3399]->[1:1454:3398] 2026-02-19T08:17:26.363345Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:17:26.355803Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-02-19T08:17:26.363422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:17:26.363596Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 >> PersistentBufferSpaceAllocator::MarkOccupiedTest [GOOD] 
>> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::OccupyDoubleHoleTest [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::OccupyAllSpaceAndAddNewChunk [GOOD] >> PersistentBufferSpaceAllocator::OccupyHoleTest [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::MarkOccupiedTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 |91.3%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DDisk::Basic |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::OccupyHoleTest [GOOD] >> PersistentBufferSpaceAllocator::OneOccupy [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> PersistentBufferSpaceAllocator::Empty [GOOD] >> PersistentBufferSpaceAllocator::FragmentationTest [GOOD] >> PersistentBufferSpaceAllocator::OccupyBestChoiseTest [GOOD] >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::PartitionKeyCompaction >> FolderServiceTest::TFolderServiceTransitional |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::OneOccupy [GOOD] |91.3%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.3%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::OccupyBestChoiseTest [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TUserAccountServiceTest::Get |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::FragmentationTest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::NodeStatusComputer [GOOD] >> TSentinelTests::InitialDeploymentGracePeriod >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 >> TStorageBalanceTest::TestScenario3 [GOOD] >> TAccessServiceTest::PassRequestId >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TAccessServiceTest::Authenticate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=171489581.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151489581.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151488381.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-02-19T08:16:24.108584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:16:24.141842Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:16:24.142080Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-02-19T08:16:24.149416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:16:24.149643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:16:24.149885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:16:24.150000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:16:24.150111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:16:24.150223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:16:24.150334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:16:24.150460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:16:24.150621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:16:24.150743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:16:24.150862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:16:24.150961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:16:24.151073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:16:24.186886Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-02-19T08:16:24.187083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-02-19T08:16:24.187147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:16:24.187316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:24.187505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:16:24.187591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:16:24.187639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-02-19T08:16:24.187768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-02-19T08:16:24.187844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:16:24.187888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:16:24.187920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-02-19T08:16:24.188100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-02-19T08:16:24.188173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:16:24.188215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:16:24.188254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-02-19T08:16:24.188370Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-02-19T08:16:24.188455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:16:24.188506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:16:24.188537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-02-19T08:16:24.188609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:16:24.188649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:16:24.188685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-02-19T08:16:24.188726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:16:24.188762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:16:24.188794Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-02-19T08:16:24.189015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:16:24.189072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:16:24.189104Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-02-19T08:16:24.189227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:16:24.189270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:16:24.189299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-02-19T08:16:24.189368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:16:24.189419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:16:24.189465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-02-19T08:16:24.189514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 7.639ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:34.261ms;WaitDuration:0ms;RawBytesRead:653952]) ; 2026-02-19T08:17:32.059799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:23.375ms;WaitDuration:0ms;RawBytesRead:653952], [StepName: PREPARE_RESULT; ExecutionDuration:2.873ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.107ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.232ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:7.639ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:34.261ms;WaitDuration:0ms;RawBytesRead:653952]) ; 2026-02-19T08:17:32.059956Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:150;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-02-19T08:17:32.060182Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:241;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:23.375ms;WaitDuration:0ms;RawBytesRead:653952], [StepName: 
PREPARE_RESULT; ExecutionDuration:2.873ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.107ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.232ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:7.639ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:34.261ms;WaitDuration:0ms;RawBytesRead:653952]) ; 2026-02-19T08:17:32.060446Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:246;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:23.375ms;WaitDuration:0ms;RawBytesRead:653952], [StepName: PREPARE_RESULT; ExecutionDuration:2.873ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.107ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.232ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:7.639ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:34.261ms;WaitDuration:0ms;RawBytesRead:653952]) ; 2026-02-19T08:17:32.060855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:486: Scan [1:1235:3172] finished for tablet 9437184 2026-02-19T08:17:32.061401Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:492;event=scan_finish;compute_actor_id=[1:1234:3171];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.062},{"events":["l_task_result"],"t":0.281},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.284}],"full":{"a":70094519,"name":"_full_task","f":70094519,"d_finished":0,"c":0,"l":70379506,"d":284987},"events":[{"name":"bootstrap","f":70094820,"d_finished":1172,"c":1,"l":70095992,"d":1172},{"a":70378523,"name":"ack","f":70156705,"d_finished":99772,"c":71,"l":70378417,"d":100755},{"a":70378510,"name":"processing","f":70096145,"d_finished":216485,"c":143,"l":70378421,"d":217481},{"name":"ProduceResults","f":70095529,"d_finished":185647,"c":216,"l":70379061,"d":185647},{"a":70379068,"name":"Finish","f":70379068,"d_finished":0,"c":0,"l":70379506,"d":438},{"name":"task_result","f":70096164,"d_finished":113640,"c":72,"l":70376353,"d":113640}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:23.375ms;WaitDuration:0ms;RawBytesRead:653952], [StepName: PREPARE_RESULT; ExecutionDuration:2.873ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.107ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.232ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:7.639ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:34.261ms;WaitDuration:0ms;RawBytesRead:653952]) ; 2026-02-19T08:17:32.061501Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:422;event=send_data;compute_actor_id=[1:1234:3171];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-02-19T08:17:32.062074Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:427;event=scan_finished;compute_actor_id=[1:1234:3171];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.062},{"events":["l_task_result"],"t":0.281},{"events":["l_ProduceResults","f_Finish"],"t":0.284},{"events":["l_ack","l_processing","l_Finish"],"t":0.285}],"full":{"a":70094519,"name":"_full_task","f":70094519,"d_finished":0,"c":0,"l":70380136,"d":285617},"events":[{"name":"bootstrap","f":70094820,"d_finished":1172,"c":1,"l":70095992,"d":1172},{"a":70378523,"name":"ack","f":70156705,"d_finished":99772,"c":71,"l":70378417,"d":101385},{"a":70378510,"name":"processing","f":70096145,"d_finished":216485,"c":143,"l":70378421,"d":218111},{"name":"ProduceResults","f":70095529,"d_finished":185647,"c":216,"l":70379061,"d":185647},{"a":70379068,"name":"Finish","f":70379068,"d_finished":0,"c":0,"l":70380136,"d":1068},{"name":"task_result","f":70096164,"d_finished":113640,"c":72,"l":70376353,"d":113640}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);per_step_counters:([StepName: DETECT_IN_MEM_FLAG; ExecutionDuration:0.01ms;WaitDuration:0ms;RawBytesRead:0], [StepName: PROGRAM_EXECUTION; ExecutionDuration:23.375ms;WaitDuration:0ms;RawBytesRead:653952], [StepName: PREPARE_RESULT; ExecutionDuration:2.873ms;WaitDuration:0ms;RawBytesRead:0], [StepName: INITIALIZE_SOURCE; ExecutionDuration:0.107ms;WaitDuration:0ms;RawBytesRead:0], [StepName: DUPLICATE; ExecutionDuration:0.025ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_STAGE_RESULT; ExecutionDuration:0.232ms;WaitDuration:0ms;RawBytesRead:0], [StepName: BUILD_RESULT; ExecutionDuration:7.639ms;WaitDuration:0ms;RawBytesRead:0]);counters_summ_across_all_steps:([StepName: AllSteps; ExecutionDuration:34.261ms;WaitDuration:0ms;RawBytesRead:653952]) ; Got TEvKqpCompute::TEvScanData [1:1235:3172]->[1:1234:3171] 2026-02-19T08:17:32.062242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-02-19T08:17:31.774795Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10565848;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10565848;selected_rows=0; 2026-02-19T08:17:32.062285Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-02-19T08:17:32.062423Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlConfigAndIcb >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse [GOOD] >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |91.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> TConfigsDispatcherTests::TestRemoveSubscription >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> FolderServiceTest::TFolderService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] Test command err: 2026-02-19T08:17:32.491833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:32.491906Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:32.806853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:34.247819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 
2026-02-19T08:17:34.247893Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:34.330995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] Test command err: 2026-02-19T08:15:11.230183Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486942138717514:2201];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:11.231375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:11.289102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:15:11.901820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:11.940541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:11.940647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:12.120514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:12.230224Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:12.234381Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:12.235486Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486942138717336:2081] 1771488911195667 != 1771488911195670 2026-02-19T08:15:12.253349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:12.408705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:12.408726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:12.408732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:12.408818Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:12.701646Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:12.710475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:15:13.067469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:15:13.095240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:15:13.139320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:15:13.319721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:13.547088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:13.650268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-02-19T08:15:13.658085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:13.747116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:13.798731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:13.844314Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:13.894237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:13.941944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:13.991363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:14.035262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:15.750664Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486959318588188:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:15.750796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:15.751182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486959318588200:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:15.751317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486959318588201:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:15.751461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:15.756020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:15.775627Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486959318588204:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:15:15.875051Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486959318588255:3107] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:16.218152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486942138717514:2201];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:16.218253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:15:16.492075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:16.541583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:16.587974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation ... :33.342065Z node 3 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } 
Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:33.342106Z node 3 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 4ms 2026-02-19T08:17:33.342501Z node 3 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:33.342534Z node 3 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-02-19T08:17:33.342646Z node 3 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 7ms 2026-02-19T08:17:33.343235Z node 3 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } 
} } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:33.394068Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487549465547665:2908], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:33.394139Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [3:7608487549465547666:2909], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:33.394177Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:33.394822Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487549465547669:2910], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:33.395204Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:33.400108Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487549465547663:2907]: Pool not found 2026-02-19T08:17:33.400850Z node 3 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-02-19T08:17:34.013512Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487553760514983:2915], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:34.013616Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [3:7608487553760514984:2916], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:34.013676Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:34.015210Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487553760514987:2917], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:34.015263Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:34.025913Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487553760514981:2914]: Pool not found 2026-02-19T08:17:34.029339Z node 3 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-02-19T08:17:34.038252Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487553760515004:2922], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:34.038367Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [3:7608487553760515005:2923], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:34.038441Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:34.039760Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487553760515008:2924], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:34.039828Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:34.048033Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487553760515002:2921]: Pool not found 2026-02-19T08:17:34.050003Z node 3 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-02-19T08:17:34.229957Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:246: (#37,[::1]:40080) connection closed by inactivity timeout >> DDisk::Basic [GOOD] >> DDisk::PersistentBufferFreeSpace >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization [GOOD] >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |91.4%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> PhantomBlobs::TestTwoDeadAllAliveInactiveMirror3dcMemoryLimit10_KB [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: 2026-02-19T08:13:47.608155Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:47.637648Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:47.638662Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:47.638717Z node 2 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:1:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:47.638778Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:47.639065Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:47.640217Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe 
AvailDomainId# 0 PipeClientId# [2:51:2076] ControllerId# 72057594037932033 2026-02-19T08:13:47.640266Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:47.640384Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:47.640518Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:47.656215Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:47.656312Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:47.658965Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:60:2080] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.659157Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:61:2081] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.659299Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:62:2082] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.659469Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:63:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.659604Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:64:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.659732Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:65:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.659876Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:66:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.659917Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:47.660040Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:51:2076] 2026-02-19T08:13:47.660081Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:51:2076] 2026-02-19T08:13:47.660159Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:47.660238Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:47.660594Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:47.663772Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:47.664340Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:47.664426Z node 1 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:0:3200 SlotCount# 0 SlotSizeInUnits# 
0 2026-02-19T08:13:47.664484Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:47.664806Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-02-19T08:13:47.667740Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:301} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DDisk# false VDiskServiceId# [1:5980451682:0] 2026-02-19T08:13:47.667830Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:47.668919Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:75:2077] ControllerId# 72057594037932033 2026-02-19T08:13:47.668972Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:47.669056Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:47.669192Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:47.683979Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:47.684057Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:47.685324Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:83:2082] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.685451Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:84:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.685558Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:85:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.685668Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:86:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.685845Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:87:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.686007Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:88:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.686178Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:89:2088] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:47.686207Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:47.686277Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:75:2077] 2026-02-19T08:13:47.686312Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:75:2077] 2026-02-19T08:13:47.686359Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:47.686408Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:47.688468Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send 
[2:51:2076] 2026-02-19T08:13:47.688557Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:47.688853Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:47.689117Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:47.697430Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2026-02-19T08:13:47.697519Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:47.698537Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:47.698678Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:47.698884Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:47.698923Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-02-19T08:13:47.712898Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:47.716320Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:13:47.716652Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2026-02-19T08:13:47.716728Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2026-02-19T08:13:47.716770Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-02-19T08:13:47.716940Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:13:47.717144Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:99:2092] 2026-02-19T08:13:47.717179Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:99:2092] 2026-02-19T08:13:47.717310Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:47.717384Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:101:2089] 2026-02-19T08:13:47.717409Z node 2 :PIPE_CLIENT DEB ... 
e348d2969a0571] Result# TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-02-19T08:17:31.108455Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [64e348d2969a0571] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-02-19T08:17:31.109153Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:488:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-02-19T08:17:31.109319Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} commited cookie 1 for step 488 2026-02-19T08:17:31.111942Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2026-02-19T08:17:31.112008Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:17:31.112291Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{988, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2026-02-19T08:17:31.112346Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:489} Tx{1479, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:17:31.112464Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1355:2259] 2026-02-19T08:17:31.112499Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037932033] push event to server [20:1355:2259] 2026-02-19T08:17:31.112554Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1291:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.054) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.05) 2026-02-19T08:17:31.215972Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-02-19T08:17:31.216062Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, 
NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:17:31.216207Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137120380146912}: tablet 72075186224037983 wasn't changed 2026-02-19T08:17:31.216250Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137120380146912}: tablet 72075186224037983 skipped channel 0 2026-02-19T08:17:31.216331Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137120380146912}: tablet 72075186224037983 skipped channel 1 2026-02-19T08:17:31.216367Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137120380146912}: tablet 72075186224037983 skipped channel 2 2026-02-19T08:17:31.216457Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137120380146912}(72075186224037983)::Execute - TryToBoot was not successfull 2026-02-19T08:17:31.216546Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{989, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2026-02-19T08:17:31.216595Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1480, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:17:31.253751Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [e2543b6371002f6c] bootstrap ActorId# [20:11687:4484] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:489:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-02-19T08:17:31.253945Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2543b6371002f6c] Id# [72057594037927937:2:489:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-02-19T08:17:31.253996Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [e2543b6371002f6c] restore Id# [72057594037927937:2:489:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-02-19T08:17:31.254049Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2543b6371002f6c] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:489:0:0:246:1] Marker# BPG33 2026-02-19T08:17:31.254087Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2543b6371002f6c] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:489:0:0:246:1] Marker# BPG32 2026-02-19T08:17:31.254227Z node 20 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [20:389:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:489:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-02-19T08:17:31.267545Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2543b6371002f6c] received {EvVPutResult Status# OK ID# [72057594037927937:2:489:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 506 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 507 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-02-19T08:17:31.267692Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [e2543b6371002f6c] Result# TEvPutResult {Id# [72057594037927937:2:489:0:0:246:0] Status# OK StatusFlags# { 
Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-02-19T08:17:31.267744Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [e2543b6371002f6c] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:489:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-02-19T08:17:31.268579Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:489:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-02-19T08:17:31.268735Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} commited cookie 1 for step 489 2026-02-19T08:17:31.271259Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1481, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2026-02-19T08:17:31.271322Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1481, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:17:31.271534Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1481, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{990, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2026-02-19T08:17:31.271583Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:490} Tx{1481, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:17:31.271690Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1355:2259] 2026-02-19T08:17:31.271721Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037932033] push event to server [20:1355:2259] 2026-02-19T08:17:31.271777Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1291:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.054) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.06) *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.05) 2026-02-19T08:17:31.378203Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1482, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-02-19T08:17:31.378293Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1482, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:17:31.378432Z node 20 :HIVE WARN: 
tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137120380150496}: tablet 72075186224037986 wasn't changed 2026-02-19T08:17:31.378479Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137120380150496}: tablet 72075186224037986 skipped channel 0 2026-02-19T08:17:31.378564Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137120380150496}: tablet 72075186224037986 skipped channel 1 2026-02-19T08:17:31.378602Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{137120380150496}: tablet 72075186224037986 skipped channel 2 2026-02-19T08:17:31.378676Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137120380150496}(72075186224037986)::Execute - TryToBoot was not successfull 2026-02-19T08:17:31.378748Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1482, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{991, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2026-02-19T08:17:31.378816Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1482, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> TUserAccountServiceTest::Get [GOOD] >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg >> TConfigsDispatcherTests::TestSubscriptionNotification >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |91.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> FolderServiceTest::TFolderServiceAdapter |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2026-02-19T08:17:31.683177Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487541802624543:2175];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:31.683231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:31.781390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: 
ActorUnknown 2026-02-19T08:17:32.414974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:32.415098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:32.458955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:32.479524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:32.511440Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487541802624406:2081] 1771489051672987 != 1771489051672990 2026-02-19T08:17:32.524896Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:32.679375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:32.689874Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:33.173398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:33.214010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:33.225536Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdc6838edd0] Connect to grpc://localhost:27921 2026-02-19T08:17:33.399558Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdc6838edd0] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-02-19T08:17:33.461984Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdc6838edd0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27921: Failed to connect to remote host: Connection refused 2026-02-19T08:17:33.463828Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdc6838edd0] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-02-19T08:17:33.464350Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdc6838edd0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27921: Failed to connect to remote host: Connection refused 2026-02-19T08:17:34.466294Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdc6838edd0] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-02-19T08:17:34.472799Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdc6838edd0] Status 5 Not Found 2026-02-19T08:17:34.474027Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdc6838edd0] Request ListFoldersRequest { id: "i_am_exists" } 2026-02-19T08:17:34.487535Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdc6838edd0] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/unittest >> PhantomBlobs::TestTwoDeadAllAliveInactiveMirror3dcMemoryLimit10_KB [GOOD] Test command err: RandomSeed# 4160261753085206457 Write blobs Set Keep flags Wait for sync Stop dead nodes Stop node# 1 2026-02-19T08:14:19.015516Z 2 00h31m00.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 1 Stop node# 5 2026-02-19T08:14:21.846800Z 2 00h35m00.200000s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Wait for sync Set DoNotKeepFlags on first half of blobs Wait for sync Write unsynced blobs batch, blobs written# 0 Write unsynced blobs batch, blobs written# 1000 Write unsynced blobs batch, blobs written# 2000 Write unsynced blobs batch, blobs written# 3000 Write unsynced blobs batch, blobs written# 4000 Write unsynced blobs batch, blobs written# 5000 Write unsynced blobs batch, blobs written# 6000 Write unsynced blobs batch, blobs written# 7000 Write unsynced blobs batch, blobs written# 8000 Write unsynced blobs batch, blobs written# 9000 Force syncLog trim Set DoNotKeepFlags on second half of blobs Wait for sync Write unsynced blobs batch, blobs written# 0 Write unsynced blobs batch, blobs written# 1000 Write unsynced blobs batch, blobs written# 2000 Write unsynced blobs batch, blobs written# 3000 Write unsynced blobs batch, blobs written# 4000 Write unsynced blobs batch, blobs written# 5000 Write unsynced blobs batch, blobs written# 6000 Write unsynced blobs batch, blobs written# 7000 Write unsynced blobs batch, blobs written# 8000 Write unsynced blobs batch, blobs written# 9000 Force syncLog trim Checking memory consumption: Consumed# 0 Limit# 72704 Restart nodes Start dead nodes Start node# 1 Start node# 5 Wait for sync Move soft barrier Wait for sync Group status# TEvStatusResult {Status# OK StatusFlags# { }} >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2026-02-19T08:17:32.366371Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487544092237308:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:32.366440Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:32.404424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:17:32.741499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:32.745858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:32.745974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:32.748290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:32.887011Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:32.900628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:33.376093Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:33.727933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:33.739109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TAccessServiceTest::PassRequestId [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |91.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] Test command err: 2026-02-19T08:17:35.285521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:35.285596Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:35.517658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TServiceAccountServiceTest::IssueToken [GOOD] >> TAccessServiceTest::Authenticate [GOOD] >> TServiceAccountServiceTest::Get >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> TServiceAccountServiceTest::Get [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] Test command err: 2026-02-19T08:17:13.348817Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-02-19T08:17:13.348887Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-02-19T08:17:13.348954Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:13.348986Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-02-19T08:17:13.349031Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-02-19T08:17:13.349112Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-02-19T08:17:13.350016Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } 
Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-02-19T08:17:13.355589Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 
PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-02-19T08:17:36.846589Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-02-19T08:17:36.846739Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-02-19T08:17:36.846903Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-02-19T08:17:36.846961Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:36.857727Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:36.857791Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:17:36.857907Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-02-19T08:17:36.857959Z node 9 :CMS DEBUG: 
sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-02-19T08:17:36.857992Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-02-19T08:17:36.858025Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-02-19T08:17:36.858066Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-02-19T08:17:36.858098Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-02-19T08:17:36.858128Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-02-19T08:17:36.858160Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-02-19T08:17:36.858635Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-02-19T08:17:36.859247Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-02-19T08:17:36.859499Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } 
ResponseTime: 7800110 2026-02-19T08:17:36.859632Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-02-19T08:17:36.859757Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-02-19T08:17:36.859909Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-02-19T08:17:36.860046Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-02-19T08:17:36.860197Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-02-19T08:17:36.860264Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:36.860710Z node 9 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 12:51, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:36.860790Z node 9 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-02-19T08:17:36.861065Z node 9 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 6 2026-02-19T08:17:36.861122Z node 9 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:51 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2026-02-19T08:17:34.451526Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487553188566128:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:34.452129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:34.774076Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:34.818457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:34.818589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:34.917862Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487553188565892:2081] 1771489054407507 != 1771489054407510 2026-02-19T08:17:34.928587Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:34.946199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:35.057263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:35.255631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:35.300497Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c911ae8edd0]{trololo} Connect to grpc://localhost:19505 2026-02-19T08:17:35.301773Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c911ae8edd0]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2026-02-19T08:17:35.324748Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c911ae8edd0]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } 2026-02-19T08:17:35.440996Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> FolderServiceTest::TFolderService [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TableCreation::UpdateTableWithAclModification >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2026-02-19T08:17:35.106233Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487557401727210:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:35.106523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:35.408232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:35.408321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:35.447728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:35.477850Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487557401726947:2081] 1771489055019182 != 1771489055019185 
2026-02-19T08:17:35.483688Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:35.486706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:35.624079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:35.707906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:35.716717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:35.746396Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d2c6ec8edd0] Connect to grpc://localhost:65081 2026-02-19T08:17:35.747860Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d2c6ec8edd0] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2026-02-19T08:17:35.766042Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d2c6ec8edd0] Status 7 Permission Denied 2026-02-19T08:17:35.767538Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d2c6ec8edd0] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2026-02-19T08:17:35.773683Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d2c6ec8edd0] Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TestScriptExecutionsUtils::TestRetryPolicyItem [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TestScriptExecutionsUtils::TestRetryLimiterWithLinearBackoff [GOOD] >> TestScriptExecutionsUtils::TestRetryLimiterWithExponentialBackoff [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> KqpProxy::InvalidSessionID |91.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/proxy_service/ut/unittest >> TestScriptExecutionsUtils::TestRetryLimiterWithExponentialBackoff [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2026-02-19T08:17:36.179435Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487562890456009:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:36.180237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:36.438055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:36.457262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:36.457429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:36.546146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:36.546510Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487562890455940:2081] 1771489056163453 != 1771489056163456 2026-02-19T08:17:36.556920Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:36.614653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:36.824461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:36.829808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:36.834389Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf60538edd0] Connect to grpc://localhost:16287 2026-02-19T08:17:36.875143Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf60538edd0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-02-19T08:17:36.905879Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf60538edd0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:16287: Failed to connect to remote host: Connection refused 2026-02-19T08:17:36.909810Z node 1 :GRPC_CLIENT 
DEBUG: grpc_service_client.h:122: [7cf60538edd0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-02-19T08:17:36.914003Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf60538edd0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:16287: Failed to connect to remote host: Connection refused 2026-02-19T08:17:37.186723Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:37.918265Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf60538edd0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-02-19T08:17:37.926462Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf60538edd0] Status 5 Not Found 2026-02-19T08:17:37.927164Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf60538edd0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2026-02-19T08:17:37.934310Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf60538edd0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } 2026-02-19T08:17:37.938064Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf60538edd0] Request ResolveFoldersRequest { folder_ids: "i_am_empty" } 2026-02-19T08:17:37.939871Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf60538edd0] Response ResolveFoldersResponse { } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] Test command err: 2026-02-19T08:17:37.881270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:37.881346Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:38.137899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:39.529534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:39.529608Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:39.612366Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> TSequence::CreateSequenceParallel >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no 
balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 >> TSequence::CreateSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |91.4%| [TA] $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> PQCountersLabeled::PartitionKeyCompaction [GOOD] >> PQCountersLabeled::PartitionBlobCompactionCounters |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |91.4%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TSentinelTests::BSControllerUnresponsive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-02-19T08:17:39.968472Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:17:39.978916Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:17:39.979312Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:17:39.979380Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:39.979477Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:17:39.980260Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:259:2253], now have 1 active actors on pipe 2026-02-19T08:17:39.980374Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:17:40.007426Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:17:40.007604Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:40.008518Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:17:40.008668Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:17:40.009151Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:17:40.009523Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928037][Partition][0][StateInit] 
bootstrapping 0 [2:267:2224] 2026-02-19T08:17:40.011901Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:17:40.011980Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-02-19T08:17:40.012031Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2224] 2026-02-19T08:17:40.012082Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:17:40.012171Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:17:40.012213Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-02-19T08:17:40.012251Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:40.012294Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:40.012330Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:40.012373Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:40.012407Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928037][Partition][0][StateIdle] Try persist 2026-02-19T08:17:40.012526Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:17:40.012757Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:17:40.013324Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:270:2258], now have 1 active actors on pipe 2026-02-19T08:17:40.076741Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:17:40.081107Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928137] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:17:40.081455Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-02-19T08:17:40.081535Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:40.081597Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928137] doesn't have tx writes info 2026-02-19T08:17:40.082407Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [2:397:2353], now have 1 active actors on pipe 2026-02-19T08:17:40.082507Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:17:40.085274Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:17:40.085403Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:40.086231Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928137] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:17:40.086398Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:17:40.086746Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:17:40.087013Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:405:2324] 2026-02-19T08:17:40.089032Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:17:40.089093Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-02-19T08:17:40.089137Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:405:2324] 2026-02-19T08:17:40.089187Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:17:40.089241Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:17:40.089289Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928137][Partition][0][StateIdle] Process pending events. 
Count 0 2026-02-19T08:17:40.089324Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:40.089383Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:40.089421Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:40.089459Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:40.089502Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928137][Partition][0][StateIdle] Try persist 2026-02-19T08:17:40.089592Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:17:40.089866Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:17:40.090342Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [2:408:2358], now have 1 active actors on pipe 2026-02-19T08:17:40.107740Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:17:40.110793Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:17:40.111140Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-02-19T08:17:40.111197Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:40.111254Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928138] doesn't have tx writes info 2026-02-19T08:17:40.111971Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [2:457:2394], now have 1 active actors on pipe 2026-02-19T08:17:40.112110Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:17:40.114299Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-02-19T08:17:40.114419Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:40.115250Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928138] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions ... 
StateIdle] Process user action and tx pending commits 2026-02-19T08:17:41.398727Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:41.398770Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:17:41.398907Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:17:41.399232Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:17:41.399828Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:530:2441], now have 1 active actors on pipe 2026-02-19T08:17:41.401280Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [3:536:2444], now have 1 active actors on pipe 2026-02-19T08:17:41.401411Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [3:537:2445], now have 1 active actors on pipe 2026-02-19T08:17:41.406579Z node 3 :PERSQUEUE DEBUG: partition.cpp:1031: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-02-19T08:17:41.407038Z node 3 :PERSQUEUE DEBUG: partition.cpp:1031: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-02-19T08:17:41.407215Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:538:2445], now have 1 active actors on pipe 2026-02-19T08:17:41.407572Z node 3 :PERSQUEUE DEBUG: partition.cpp:1031: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-02-19T08:17:41.422887Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:546:2452], now have 1 active actors on pipe 2026-02-19T08:17:41.483928Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:17:41.486390Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:17:41.487973Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:41.488054Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:17:41.488199Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:17:41.488588Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:17:41.488893Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:602:2455] 2026-02-19T08:17:41.491279Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-02-19T08:17:41.492853Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-02-19T08:17:41.493233Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-02-19T08:17:41.493373Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From m0000000002 to m0000000003 2026-02-19T08:17:41.493790Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-02-19T08:17:41.493893Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From d0000000002 to d0000000003 2026-02-19T08:17:41.494164Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-02-19T08:17:41.494223Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-02-19T08:17:41.494281Z node 3 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:17:41.494329Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-02-19T08:17:41.494450Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-02-19T08:17:41.494737Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-02-19T08:17:41.494830Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:17:41.494903Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-02-19T08:17:41.494965Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:602:2455] 2026-02-19T08:17:41.495030Z node 3 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:17:41.495105Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:17:41.495160Z node 3 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:17:41.495211Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:17:41.495257Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:41.495304Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:41.495349Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:17:41.495392Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:17:41.495521Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:17:41.495755Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:17:41.496544Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928138] server disconnected, pipe [3:537:2445] destroyed 2026-02-19T08:17:41.496650Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [3:536:2444] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 
GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 78 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 78 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> PrivateApi::PingTask ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:13:20.721122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:13:20.721225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:20.721292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:13:20.721333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:13:20.721368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:13:20.721421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:13:20.721455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:13:20.721479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 
100, BatchMaxTime 0.100000s 2026-02-19T08:13:20.721548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:13:20.721629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:13:20.722632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:13:20.722961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:13:20.819616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:13:20.819687Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:20.833502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:13:20.834382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:13:20.834577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:13:20.848168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:13:20.848936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:13:20.849685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:13:20.850036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:13:20.854339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:20.854518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:13:20.855751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:13:20.855813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:13:20.855920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:13:20.855973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:13:20.856016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:13:20.856185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:13:21.020260Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.021386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.021521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.021618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:13:21.022836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
pleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 123 Memory: 124456 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 4 StartTime: 133 TableOwnerId: 72057594046678944 FollowerId: 0 2026-02-19T08:17:39.752565Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:17:39.752628Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0123 2026-02-19T08:17:39.752759Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:17:39.752801Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-02-19T08:17:39.798162Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:17:39.798254Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:17:39.798301Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-02-19T08:17:39.798377Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 1 2026-02-19T08:17:39.798415Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-02-19T08:17:39.798542Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-02-19T08:17:39.798608Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-02-19T08:17:39.798648Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-02-19T08:17:39.798752Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:607: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-02-19T08:17:39.798851Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:39.810120Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:17:39.810200Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:17:39.810252Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:17:39.843886Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [4:1121:3084]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-02-19T08:17:39.844185Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3537: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 39 2026-02-19T08:17:39.844636Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [4:1121:3084], Recipient [4:133:2156]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 39 Generation: 2 Round: 6 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 45 Memory: 124456 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 4 StartTime: 303 TableOwnerId: 72057594046678944 FollowerId: 0 2026-02-19T08:17:39.844688Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:17:39.844737Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0045 2026-02-19T08:17:39.844866Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:17:39.844905Z node 4 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-02-19T08:17:39.886513Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:92: Operation queue wakeup 2026-02-19T08:17:39.886624Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:65: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-02-19T08:17:39.886709Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2026-02-19T08:17:39.886812Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 3 seconds 2026-02-19T08:17:39.886847Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2026-02-19T08:17:39.887004Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:17:39.887050Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:17:39.887079Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-02-19T08:17:39.887143Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 1 2026-02-19T08:17:39.887174Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-02-19T08:17:39.887282Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 13940 row count 100 2026-02-19T08:17:39.887340Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2026-02-19T08:17:39.887375Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2026-02-19T08:17:39.887467Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:602: Want to split tablet 72075186233409547 by size: split by size (shardCount: 1, maxShardCount: 2, shardSize: 13940, maxShardSize: 1) 2026-02-19T08:17:39.887531Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:689: Postpone split tablet 72075186233409547 because it has borrow parts, enqueue compact them first 2026-02-19T08:17:39.887577Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2026-02-19T08:17:39.887657Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 
2026-02-19T08:17:39.901980Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:17:39.902059Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:17:39.902099Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:17:40.110269Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:17:40.110340Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:17:40.110440Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [4:133:2156], Recipient [4:133:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:17:40.110480Z node 4 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2026-02-19T08:17:34.531022Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487552773610275:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:34.531076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:34.958830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:35.012712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:35.012829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:35.095372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:35.175147Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:35.178019Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487552773610235:2081] 1771489054497018 != 1771489054497021 2026-02-19T08:17:35.241370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:35.379100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:35.578035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:38.394911Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487570750763985:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:38.395075Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:38.423004Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:38.519268Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:38.519364Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:38.523965Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:38.538410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:38.621019Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:38.740413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:38.747405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> HashShuffle::Compatibility+ShuffleElimination-HashV2 >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> KqpScanSpilling::HandleErrorsCorrectly >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2026-02-19T08:17:38.177456Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487569717146056:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:38.177528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:38.601815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-02-19T08:17:38.668318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:38.668415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:38.789883Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487569717146006:2081] 1771489058175255 != 1771489058175258 2026-02-19T08:17:38.814841Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:38.840387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:38.860509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:39.141110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:39.150164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:39.191895Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:39.274073Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf3246424d0] Connect to grpc://localhost:2053 2026-02-19T08:17:39.275043Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf3246424d0] Request ListFoldersRequest { id: "i_am_exists" } 2026-02-19T08:17:39.298653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:39.300789Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf3246424d0] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2026-02-19T08:17:39.306776Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf32466d2d0] Connect to grpc://localhost:5148 2026-02-19T08:17:39.307701Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf32466d2d0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2026-02-19T08:17:39.316303Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf32466d2d0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2026-02-19T08:17:39.318296Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf32466d2d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-02-19T08:17:39.321325Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf32466d2d0] Status 5 Not Found 2026-02-19T08:17:39.325701Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf3246424d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-02-19T08:17:39.327935Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf3246424d0] Status 5 Not Found 2026-02-19T08:17:39.328553Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: 
[7cf32466d2d0] Request ResolveFoldersRequest { folder_ids: "i_am_empty" } 2026-02-19T08:17:39.330236Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf32466d2d0] Response ResolveFoldersResponse { } >> TPQTestSlow::TestWriteVeryBigMessage >> TPQTestSlow::TestOnDiskStoredSourceIds >> HashShuffle::Compatibility-ShuffleElimination-HashV2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] Test command err: 2026-02-19T08:17:21.384542Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-02-19T08:17:21.384613Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-02-19T08:17:21.384680Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:21.384710Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-02-19T08:17:21.384766Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-02-19T08:17:21.384845Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-02-19T08:17:21.385868Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: 
DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-02-19T08:17:21.390624Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { 
VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 
FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-02-19T08:17:40.945274Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-02-19T08:17:40.945409Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-02-19T08:17:40.945482Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:40.945951Z node 1 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 6:25, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:40.946018Z node 1 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 7:30, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:40.946066Z node 1 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 7:31, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:40.946109Z node 1 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-02-19T08:17:40.956602Z node 1 :CMS DEBUG: sentinel.cpp:1407: [Sentinel] [Main] Retrying: attempt# 1 2026-02-19T08:17:40.956674Z node 1 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-02-19T08:17:40.969937Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:40.970009Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-02-19T08:17:40.970120Z node 1 :CMS DEBUG: sentinel.cpp:1407: [Sentinel] [Main] Retrying: attempt# 2 2026-02-19T08:17:40.970152Z node 
1 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-02-19T08:17:40.970300Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2026-02-19T08:17:40.970347Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-02-19T08:17:40.970384Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-02-19T08:17:40.970415Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-02-19T08:17:40.970459Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-02-19T08:17:40.970500Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-02-19T08:17:40.970531Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-02-19T08:17:40.970560Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-02-19T08:17:40.970938Z node 1 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { }, cookie# 159 2026-02-19T08:17:40.970979Z node 1 :CMS ERROR: sentinel.cpp:1389: [Sentinel] [Main] Unsuccesful response from BSC: error# 2026-02-19T08:17:40.971367Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-02-19T08:17:40.971725Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-02-19T08:17:40.972088Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 
Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-02-19T08:17:40.972220Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-02-19T08:17:40.972406Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-02-19T08:17:40.972553Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-02-19T08:17:40.972717Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 
ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-02-19T08:17:40.972869Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-02-19T08:17:40.972947Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:40.973378Z node 1 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 3 >> SlowTopicAutopartitioning::CDC_Write >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> KqpRe2::IncorrectRegexNoError |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> KqpScanSpilling::SpillingPragmaParseError >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout >> KqpScanLogs::GraceJoin+EnabledLogs >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> KqpScanLogs::WideCombine+EnabledLogs >> KqpScanSpilling::SelfJoinQueryService >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TSchemeShardTTLTests::BatchingDoesNotAffectCorrectness-EnableConditionalEraseResponseBatching-false >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize10-4 >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2026-02-19T08:17:38.235415Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487572215275277:2199];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:38.235467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:38.719364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:38.719907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:38.783602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:38.809919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487572215275107:2081] 1771489058202054 != 1771489058202057 2026-02-19T08:17:38.824956Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:38.828716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:39.021822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:39.158161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:39.165066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:39.269947Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:42.596539Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487587081067978:2213];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:42.596599Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:42.689879Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:42.784634Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:42.801810Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:42.801911Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:42.830754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:42.958181Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:43.138734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:43.146575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |91.4%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> PQCountersLabeled::PartitionBlobCompactionCounters [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear >> TSchemeShardTTLTests::BatchingDoesNotAffectCorrectness-EnableConditionalEraseResponseBatching-false [GOOD] >> TestMalformedRequest::ContentLengthHigher [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence |91.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize10-4 [GOOD] >> TSchemeShardTTLTests::ShouldSkipDroppedColumn |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] >> TableCreation::UpdateTableWithAclModification [GOOD] >> TableCreation::UpdateTableWithAclRollback >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BatchingDoesNotAffectCorrectness-EnableConditionalEraseResponseBatching-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2026-02-19T08:17:48.414632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:17:48.414744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:48.414812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:17:48.414877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:17:48.414915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:17:48.414953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:17:48.415000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:17:48.415032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:17:48.415088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:48.415180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:17:48.416140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:17:48.416454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:17:48.606566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:48.606659Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:48.612683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:17:48.612875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:17:48.613049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:17:48.616519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:17:48.616702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:17:48.617480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:48.617726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:17:48.619737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:48.619931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:17:48.620745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:17:48.620807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:48.621176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:17:48.621227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:17:48.621276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:17:48.621467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:17:48.796547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.797593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.797868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.797960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:17:48.798555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:48.798798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
2Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:17:50.314727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:17:50.316343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:17:50.316623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:17:50.317393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:17:50.317680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:17:50.317757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:17:50.317859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:17:50.317902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:17:50.318005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:17:50.318051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:17:50.318086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:17:50.318168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:740:2717] message: TxId: 101 2026-02-19T08:17:50.318221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:17:50.318267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:17:50.318301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:17:50.318444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:17:50.320205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:17:50.320256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:741:2718] TestWaitNotification: OK eventTxId 101 2026-02-19T08:17:50.764631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:17:50.764777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:17:50.764910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableCorrectness, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.044000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.044000Z, at schemeshard: 72057594046678944 2026-02-19T08:17:50.765014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableCorrectness, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:17:50.765086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:17:50.765252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600448460044000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:17:50.765812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:17:50.786995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:50.787072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:17:50.789733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:50.789907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:17:50.790000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLTableCorrectness, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T18:01:00.047000Z, at schemeshard: 72057594046678944 2026-02-19T08:17:50.790057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:17:50.790595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableCorrectness" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:17:50.790921Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableCorrectness" took 373us result status StatusSuccess 2026-02-19T08:17:50.791986Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableCorrectness" PathDescription { Self { Name: "TTLTableCorrectness" PathId: 38 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableCorrectness" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 
0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-02-19T08:17:45.801227Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-02-19T08:17:45.947695Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:45.947815Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:45.947882Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:45.947966Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-02-19T08:17:45.984834Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037927937] server connected, pipe [1:183:2195], now have 1 active actors on pipe 2026-02-19T08:17:45.984985Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 
2026-02-19T08:17:46.024266Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-02-19T08:17:46.024468Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:46.026618Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-02-19T08:17:46.026799Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:17:46.026934Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:17:46.027511Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:17:46.028107Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-02-19T08:17:46.033069Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:17:46.033156Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-02-19T08:17:46.033223Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-02-19T08:17:46.033321Z node 1 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:17:46.047210Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:17:46.048716Z node 1 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-02-19T08:17:46.048794Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:46.048843Z node 1 :PERSQUEUE DEBUG: partition.cpp:2438: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:46.048906Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:17:46.048959Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:46.049024Z node 1 :PERSQUEUE DEBUG: partition.cpp:2502: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:46.049097Z node 1 :PERSQUEUE DEBUG: partition.cpp:3799: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-02-19T08:17:46.049141Z node 1 :PERSQUEUE DEBUG: partition.cpp:3870: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-02-19T08:17:46.049178Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:17:46.049214Z node 1 :PERSQUEUE DEBUG: partition.cpp:2371: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:17:46.049282Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][0][StateIdle] Try persist 2026-02-19T08:17:46.049554Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-02-19T08:17:46.049602Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-02-19T08:17:46.049671Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:17:46.061446Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:17:46.061802Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-02-19T08:17:46.064548Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:17:46.064633Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 
2026-02-19T08:17:46.064678Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-02-19T08:17:46.064732Z node 1 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:17:46.066009Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:17:46.067091Z node 1 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-02-19T08:17:46.067141Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:17:46.067171Z node 1 :PERSQUEUE DEBUG: partition.cpp:2438: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:46.067214Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:17:46.067244Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:46.067292Z node 1 :PERSQUEUE DEBUG: partition.cpp:2502: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:46.067347Z node 1 :PERSQUEUE DEBUG: partition.cpp:3799: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-02-19T08:17:46.067400Z node 1 :PERSQUEUE DEBUG: partition.cpp:3870: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-02-19T08:17:46.067431Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:17:46.067456Z node 1 :PERSQUEUE DEBUG: partition.cpp:2371: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-02-19T08:17:46.067493Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][1][StateIdle] Try persist 2026-02-19T08:17:46.067747Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-02-19T08:17:46.067783Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-02-19T08:17:46.067885Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:17:46.068129Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-02-19T08:17:46.068386Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:17:46.068617Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write reques ... ot data from cache. Partition 0 offset 6 partno 0 count 1 parts_count 10 source 1 size 5243650 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2026-02-19T08:17:51.476680Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 17. All 2 blobs are from cache. 2026-02-19T08:17:51.476760Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' 2026-02-19T08:17:51.476800Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 6 partno 0 count 1 parts 10 suffix '0' 2026-02-19T08:17:51.476898Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-02-19T08:17:51.477025Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477071Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477100Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477131Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477162Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 1 size 635356 from pos 0 cbcount 2 2026-02-19T08:17:51.477196Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477224Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477255Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477282Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477310Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477338Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477369Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477398Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477426Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477453Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer 
processing batch offset 5 totakecount 2 count 1 size 123358 from pos 0 cbcount 1 2026-02-19T08:17:51.477512Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477540Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477569Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477597Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477625Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477652Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477677Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477702Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477758Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477785Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:17:51.477813Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 1 size 123358 from pos 0 cbcount 1 2026-02-19T08:17:51.477925Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-02-19T08:17:51.477957Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-02-19T08:17:51.477993Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 4:6 isTruncatedBlob 0 2026-02-19T08:17:51.484699Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 4 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 1 2026-02-19T08:17:51.505696Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 5 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-02-19T08:17:51.509422Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-02-19T08:17:51.510550Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key 
d0000000000_00000000000000000000_00000_0000000001_00016 2026-02-19T08:17:51.510653Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000001_00006_0000000002_00014 2026-02-19T08:17:51.510733Z node 3 :PERSQUEUE DEBUG: partition.cpp:4549: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-02-19T08:17:51.511003Z node 3 :PERSQUEUE DEBUG: partition.cpp:4557: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-02-19T08:17:51.511065Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:17:51.511181Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 283 2026-02-19T08:17:51.511275Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 6 count 2 size 271 2026-02-19T08:17:51.511313Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00016(+) to d0000000000_00000000000000000000_00000_0000000001_00016(+) 2026-02-19T08:17:51.511346Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000001_00006_0000000002_00014(+) to d0000000000_00000000000000000001_00006_0000000002_00014(+) 2026-02-19T08:17:51.515719Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 0 count 1 actorID [3:138:2142] 2026-02-19T08:17:51.515797Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 1 count 2 actorID [3:138:2142] 2026-02-19T08:17:51.515838Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 1 size 3072032 actorID [3:138:2142] is actual 1 2026-02-19T08:17:51.515918Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 1 size 283 actorID [3:138:2142] 2026-02-19T08:17:51.515973Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 4 count 2 size 7415109 actorID [3:138:2142] is actual 1 2026-02-19T08:17:51.516033Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 2 size 271 actorID [3:138:2142] 2026-02-19T08:17:51.516135Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 16 suffix '0' size 283 2026-02-19T08:17:51.516187Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 1 partno 6 count 2 parts 14 suffix '0' size 271 2026-02-19T08:17:51.516233Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 3072032 2026-02-19T08:17:51.523346Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 7415109 2026-02-19T08:17:51.533765Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 283 2026-02-19T08:17:51.533879Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 271 2026-02-19T08:17:51.534318Z node 3 :PERSQUEUE DEBUG: partition.cpp:2182: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:17:51.534368Z node 3 :PERSQUEUE DEBUG: partition.cpp:2190: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-02-19T08:17:51.534407Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response Write 3 done Got compacter offset = -1 2026-02-19T08:17:51.539239Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037927937] server connected, pipe [3:315:2302], now have 1 active actors on pipe 2026-02-19T08:17:51.539317Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-02-19T08:17:51.539347Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-02-19T08:17:51.539436Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 6 for user __ydb_compaction_consumer 2026-02-19T08:17:51.539735Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037927937] server connected, pipe [3:317:2304], now have 1 active actors on pipe Got start offset = 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize10-4 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:17:48.764607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:17:48.764716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:48.764800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:17:48.764846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:17:48.764885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:17:48.764923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:17:48.764958Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:17:48.764988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 10, BatchMaxTime 0.100000s 2026-02-19T08:17:48.765069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:48.765144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:17:48.766031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:17:48.766335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:17:48.881948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:48.882014Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:48.897877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:17:48.907906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:17:48.908148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:17:48.926781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:17:48.927547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:17:48.928280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:48.928564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:17:48.931625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:48.931798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:17:48.932859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:17:48.932923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:48.933038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:17:48.933086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:17:48.933124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:17:48.933285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:17:49.084297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.085307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.085438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.085540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.085639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.085798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.085870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.085957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.086069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.086168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.086232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.086290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.086389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:49.086461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474 ... 
ersion: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:17:51.219603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:17:51.219733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:17:51.219859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.042000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.042000Z, at schemeshard: 72057594046678944 2026-02-19T08:17:51.219981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:17:51.220080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:17:51.220296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600448460042000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:17:51.222357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:17:51.254974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409546, status: OK, batch size 1/10, batch age 0.000000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:17:51.255902Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:17:51.256274Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 404us 
result status StatusSuccess 2026-02-19T08:17:51.257298Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:17:51.263775Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:17:51.264108Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 348us result status StatusSuccess 2026-02-19T08:17:51.264658Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthHigher [GOOD] Test command err: 2026-02-19T08:15:32.813991Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487029070243723:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:32.814171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:33.038581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:33.056848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:33.056942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:33.125274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:33.183586Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487029070243691:2081] 1771488932811613 != 1771488932811616 2026-02-19T08:15:33.185166Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:33.246091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:33.246123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:33.246136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:33.246247Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:33.306863Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:33.448994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:33.656857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:15:33.664094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:15:33.737797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:33.823213Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:33.844387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:33.895662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:33.952976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-02-19T08:15:33.958868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:34.002985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:34.042661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:34.102595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:34.135859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:34.177595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:34.215971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:35.885617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487041955147235:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:35.885617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487041955147240:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:35.885732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:35.886291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487041955147250:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:35.886367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:35.889468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:35.901273Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487041955147249:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:15:35.974743Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487041955147302:3099] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:36.270254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:36.298782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:36.324719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:36.351717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:36.376237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp: ... 
:50.268857Z node 3 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:50.268893Z node 3 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 3ms 2026-02-19T08:17:50.269296Z node 3 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:50.269342Z node 3 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-02-19T08:17:50.269476Z node 3 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 4ms 2026-02-19T08:17:50.270033Z node 3 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-02-19T08:17:50.349439Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487622321456264:2798], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.349534Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [3:7608487622321456265:2799], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:50.349605Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.350891Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487622321456268:2800], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.350955Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.355703Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487622321456262:2797]: Pool not found 2026-02-19T08:17:50.356509Z node 3 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-02-19T08:17:50.793570Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487622321456284:2804], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.793654Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [3:7608487622321456285:2805], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:50.793705Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.794587Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487622321456288:2806], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.795378Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.802061Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487622321456282:2803]: Pool not found 2026-02-19T08:17:50.802787Z node 3 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-02-19T08:17:50.806672Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487622321456305:2811], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.806775Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [3:7608487622321456306:2812], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:17:50.806835Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.807864Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487622321456309:2813], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.807914Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.811782Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [3:7608487622321456303:2810]: Pool not found 2026-02-19T08:17:50.812421Z node 3 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-02-19T08:17:51.126450Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:246: (#37,[::1]:45402) connection closed by inactivity timeout >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |91.5%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> KqpScanSpilling::SpillingPragmaParseError [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> KqpRe2::IncorrectRegexNoError [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> HashShuffle::Compatibility+ShuffleElimination-HashV2 [GOOD] >> KqpPg::DropTablePgMultiple [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> TSequence::AlterSequence [GOOD] >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> HashShuffle::Compatibility-ShuffleElimination-HashV2 [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> TSentinelTests::PDiskRackGuardFullRack [GOOD] >> KqpPg::DropTableIfExists >> TSequence::AlterTableSetDefaultFromSequence >> HashShuffle::Compatibility+ShuffleElimination+HashV2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> KqpRe2::IncorrectRegexWithoutExecutionNoError >> HashShuffle::Compatibility-ShuffleElimination+HashV2 >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> KqpScanSpilling::SelfJoinQueryService [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/ynks/003705/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk9 2026-02-19T08:17:46.399500Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487605338748137:2169];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:46.399574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:47.028026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:47.028119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:47.145070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:47.175606Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:47.229885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487605338748006:2081] 1771489066388661 != 1771489066388664 2026-02-19T08:17:47.256472Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:47.454460Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:47.571655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:47.571674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:47.571680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:47.571961Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:47.621640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:48.537389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:48.549614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:48.662091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:48.947867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.189145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.322117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.407915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487605338748137:2169];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:51.407992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:52.004301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487626813586384:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.004426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.004845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487626813586394:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.004886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.511774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.544814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.578361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.653493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.691902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.763646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.838524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.904440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:53.047619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487635403521862:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:53.047695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:53.048043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487635403521867:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:53.048078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487635403521868:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:53.048181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:53.052031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:53.077319Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487635403521871:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:17:53.141504Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487635403521922:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:55.304293Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608487643993456842:2547], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2026-02-19T08:17:55.305411Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=N2JhNzY5NTAtNjJiNjY4MDAtOGE4MTRkMjktNmE1NTU1OTA=, ActorId: [1:7608487643993456835:2543], ActorState: ExecuteState, LegacyTraceId: 01khtfm8050wx1pn6fkgw4mk73, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 3 column: 40 } message: "Bad \"EnableSpillingNodes\" setting for \"$all\" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key \'GraceJoin1\' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: \'None\', \'GraceJoin\', \'Aggregation\', \'All\'. " end_position { row: 3 column: 40 } severity: 1 } } tx_id# trace_id# >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> TableCreation::UpdateTableWithAclRollback [GOOD] >> KqpPg::DropTableIfExists [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> KqpPg::TableSelect+useSink [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> KqpPg::TableSelect-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 >> TableCreation::UpdateTableAcl >> KqpProxy::NoLocalSessionExecution >> ReadSessionImplTest::DataReceivedCallbackReal >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |91.4%| [LD] {RESULT} 
$(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |91.4%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |91.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskRackGuardFullRack [GOOD] Test command err: 2026-02-19T08:17:15.197078Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-02-19T08:17:15.197140Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-02-19T08:17:15.197200Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-02-19T08:17:15.197251Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-02-19T08:17:15.197303Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-02-19T08:17:15.197407Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-02-19T08:17:15.199352Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: 
"pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } Hosts { Name: "node-9" State: UNKNOWN Devices { Name: "pdisk-9-36" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-37" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-38" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-39" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 9 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-10" State: UNKNOWN Devices { Name: "pdisk-10-40" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-41" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-42" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-43" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 10 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-11" State: UNKNOWN Devices { Name: "pdisk-11-44" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-45" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-46" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-47" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 11 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-12" State: UNKNOWN Devices { Name: "pdisk-12-48" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-49" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-50" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-51" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 12 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-13" State: UNKNOWN Devices { Name: "pdisk-13-52" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-53" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-54" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-55" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 13 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-14" State: UNKNOWN Devices { Name: "pdisk-14-56" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-57" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-58" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-59" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 14 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-15" State: UNKNOWN Devices { Name: "pdisk-15-60" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-61" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-62" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-63" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 15 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-16" State: UNKNOWN Devices { Name: "pdisk-16-64" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-65" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-66" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-67" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 16 InterconnectPort: 10000 Location { Rack: "rack-8" 
} StartTimeSeconds: 0 } } 2026-02-19T08:17:15.209459Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/9/pdisk-36.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 37 Path: "/9/pdisk-37.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/9/pdisk-38.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/9/pdisk-39.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/10/pdisk-40.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/10/pdisk-41.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/10/pdisk-42.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 
43 Path: "/10/pdisk-43.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/11/pdisk-44.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/11/pdisk-45.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/11/pdisk-46.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 47 Path: "/11/pdisk-47.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/12/pdisk-48.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/12/pdisk-49.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/12/pdisk-50.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/12/pdisk-51.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/13/pdisk-52.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/13/pdisk-53.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/13/pdisk-54.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/13/pdisk-55.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: "/14/pdisk-56.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/14/pdisk-57.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/14/pdisk-58.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/14/pdisk-59.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/15/pdisk-60.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/15/pdisk-61.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/15/pdisk-62.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/15/pdisk-63.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/16/pdisk-64.data" G ... 
374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.742900Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 96 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-96.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 97 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-97.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 98 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-98.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 99 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-99.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.743116Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 100 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-100.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 101 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-101.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 102 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-102.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 103 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-103.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.743247Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 104 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-104.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 105 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-105.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 106 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-106.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 107 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-107.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.743376Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 108 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-108.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 109 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-109.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 110 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-110.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 111 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-111.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.743496Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 
112 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-112.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 113 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-113.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 114 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-114.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 115 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-115.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.743624Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 116 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-116.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 117 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-117.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 118 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-118.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 119 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-119.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.743748Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 120 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-120.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 121 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-121.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 122 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-122.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 123 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-123.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.743869Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 124 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-124.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 125 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-125.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 126 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-126.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 127 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-127.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.744001Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 128 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-128.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 129 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-129.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 130 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-130.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 131 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-131.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.744129Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 72 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-72.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 73 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-73.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 74 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-74.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 75 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-75.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-02-19T08:17:54.744205Z node 17 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-02-19T08:17:54.744943Z node 17 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 31:124, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:54.745000Z node 17 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 31:125, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:54.745037Z node 17 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 31:126, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:54.745070Z node 17 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 31:127, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:54.745103Z node 17 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 23:92, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:54.745135Z node 17 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 23:93, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:54.745166Z node 17 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 23:94, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:54.745199Z node 17 :CMS NOTICE: sentinel.cpp:1149: [Sentinel] [Main] PDisk status changed: pdiskId# 23:95, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-02-19T08:17:54.745242Z node 17 :CMS DEBUG: sentinel.cpp:1216: [Sentinel] [Main] Change pdisk status: requestsSize# 8 2026-02-19T08:17:54.745628Z node 17 :CMS DEBUG: sentinel.cpp:1347: [Sentinel] [Main] 
Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 20 2026-02-19T08:17:54.745667Z node 17 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 23:92 2026-02-19T08:17:54.745699Z node 17 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 23:93 2026-02-19T08:17:54.745740Z node 17 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 23:94 2026-02-19T08:17:54.745762Z node 17 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 23:95 2026-02-19T08:17:54.745784Z node 17 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 31:124 2026-02-19T08:17:54.745810Z node 17 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 31:125 2026-02-19T08:17:54.745832Z node 17 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 31:126 2026-02-19T08:17:54.745854Z node 17 :CMS NOTICE: sentinel.cpp:1371: [Sentinel] [Main] PDisk status has been changed: pdiskId# 31:127 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:17:42.034276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:17:42.034377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:42.034444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:17:42.034486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:17:42.034522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:17:42.034585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:17:42.034622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:17:42.034652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:17:42.034733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:42.034825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:17:42.035711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:17:42.036025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:17:42.137178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:42.137242Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:42.148526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:17:42.149209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:17:42.149386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:17:42.159614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:17:42.160588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:17:42.161397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:42.161764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:17:42.179683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:42.179919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:17:42.181334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:17:42.181433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:42.181565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:17:42.181614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:17:42.181660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:17:42.181879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:17:42.365150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.374428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.374612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.374749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.374843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.374914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.374981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.375056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.375157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.375242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.375310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.375401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.375477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.375587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
y parts: 3/4 2026-02-19T08:17:56.511934Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 3/4 2026-02-19T08:17:56.511981Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2026-02-19T08:17:56.512032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2026-02-19T08:17:56.518433Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.518499Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2026-02-19T08:17:56.518595Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:758:2734] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2026-02-19T08:17:56.519168Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:17:56.519223Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.519356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [7:127:2152], Recipient [7:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:17:56.519401Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:17:56.519479Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:17:56.519536Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:17:56.519861Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:17:56.519997Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:17:56.520037Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2026-02-19T08:17:56.520073Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-02-19T08:17:56.520120Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2026-02-19T08:17:56.520153Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-02-19T08:17:56.520190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2026-02-19T08:17:56.520277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:830:2786] message: TxId: 102 2026-02-19T08:17:56.520357Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-02-19T08:17:56.520424Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:17:56.520495Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:17:56.520653Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:17:56.520708Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-02-19T08:17:56.520734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:1 2026-02-19T08:17:56.520771Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:17:56.520800Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2026-02-19T08:17:56.520826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:2 2026-02-19T08:17:56.520875Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:17:56.520907Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:3 2026-02-19T08:17:56.520932Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:3 2026-02-19T08:17:56.520988Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-02-19T08:17:56.521629Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435084, Sender [7:127:2152], Recipient [7:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:17:56.521687Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5494: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-02-19T08:17:56.521818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:17:56.521880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:17:56.521988Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:17:56.522967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:17:56.523010Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.523086Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:17:56.523116Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.523157Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:17:56.523222Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.523284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:17:56.523310Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.534426Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:17:56.534491Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.551832Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.552022Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:17:56.552122Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:830:2786] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2026-02-19T08:17:56.552326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:17:56.552393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:937:2885] 2026-02-19T08:17:56.552527Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:17:56.552773Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [7:939:2887], Recipient [7:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:17:56.552824Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:17:56.552860Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-02-19T08:17:56.560375Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [7:1017:2965], Recipient [7:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-02-19T08:17:56.560465Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:17:56.560629Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:17:56.560948Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 321us result status StatusPathDoesNotExist 2026-02-19T08:17:56.561178Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000039, drop txId: 102" Path: "/MyRoot/Table" PathId: 38 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:17:56.295431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:17:56.295529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:56.295601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:17:56.295649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:17:56.295687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:17:56.295723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:17:56.295763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:17:56.295795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:17:56.295874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:56.295942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:17:56.296931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:17:56.297275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:17:56.395677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:56.395764Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:56.409489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:17:56.410209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:17:56.410416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:17:56.431708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:17:56.432755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:17:56.433549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:56.434743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:17:56.442323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:56.442545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:17:56.443767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:17:56.443842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:56.443941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:17:56.443992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:17:56.444032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:17:56.444209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:17:56.609610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.611984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.612906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.613017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.613101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
EBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:17:57.260754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:17:57.263008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:17:57.263080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:17:57.263169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:17:57.264978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:17:57.265035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:17:57.265078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:17:57.265128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:17:57.265291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:17:57.267125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:17:57.267267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:17:57.267593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:57.267725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:17:57.267776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:17:57.268211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:17:57.268270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:17:57.268433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:17:57.268511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:17:57.270324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:17:57.270374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:17:57.270595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:57.270660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:17:57.271052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:17:57.271103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:17:57.271202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:17:57.271251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:17:57.271295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:17:57.271326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:17:57.271365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:17:57.271405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:17:57.271445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:17:57.271476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:17:57.271555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:17:57.271597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:17:57.271633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:17:57.272230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:17:57.272334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:17:57.272374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:17:57.272415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:17:57.272459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:17:57.272554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:17:57.275598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:17:57.276119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1771489077.282791 287148 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2026-02-19T08:17:57.283369Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2659] Bootstrap 2026-02-19T08:17:57.284615Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2659] Become StateWork (SchemeCache [1:676:2664]) 2026-02-19T08:17:57.286720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:17:57.287150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:17:57.287334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2026-02-19T08:17:57.287841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: 
IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1771489077 seconds (20503 days, 56 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2026-02-19T08:17:57.289686Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2659] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:17:57.293557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1771489077 seconds (20503 days, 56 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:17:57.293848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1771489077 seconds (20503 days, 56 years). The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-02-19T08:17:57.294353Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2026-02-19T08:17:56.170748Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.170783Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.170812Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:17:56.197194Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:17:56.204893Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:17:56.246012Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.250060Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:17:56.253684Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:17:56.254238Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:17:56.257916Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-02-19T08:17:56.258037Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:17:56.260850Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:17:56.260896Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-02-19T08:17:56.260943Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-02-19T08:17:56.260982Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2026-02-19T08:17:56.268078Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.268110Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.268140Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:17:56.268530Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:17:56.278007Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:17:56.278251Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.278635Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-02-19T08:17:56.279640Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:17:56.279887Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-02-19T08:17:56.280240Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-02-19T08:17:56.280463Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-02-19T08:17:56.285855Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:17:56.285914Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:17:56.285964Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:17:56.286165Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2026-02-19T08:17:56.286284Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:17:56.286308Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-02-19T08:17:56.286328Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:17:56.286450Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2026-02-19T08:17:56.286519Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-02-19T08:17:56.286541Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-02-19T08:17:56.286559Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:17:56.286636Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2026-02-19T08:17:56.286684Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-02-19T08:17:56.286707Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-02-19T08:17:56.286737Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:17:56.286854Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2026-02-19T08:17:56.298925Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.298967Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.305808Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:17:56.310796Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:17:56.311405Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:17:56.311612Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:56.313946Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2026-02-19T08:17:56.315097Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:17:56.315337Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-02-19T08:17:56.315688Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-02-19T08:17:56.315932Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-02-19T08:17:56.316053Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:17:56.316097Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:17:56.316216Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2026-02-19T08:17:56.316306Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:17:56.316326Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:17:56.316378Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2026-02-19T08:17:56.316440Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:17:56.316461Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:17:56.316533Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2026-02-19T08:17:56.316597Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-02-19T08:17:56.316616Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:17:58.731199Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). 
Partition stream id: 1 GOT RANGE 0 201 2026-02-19T08:17:58.807730Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-02-19T08:17:58.807852Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-02-19T08:17:58.808049Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:17:58.808694Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:17:58.810385Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:17:58.810709Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-02-19T08:17:58.811126Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2026-02-19T08:17:58.926596Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2026-02-19T08:17:58.927832Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:17:58.929858Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:17:58.932904Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:17:58.933879Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-02-19T08:17:58.947566Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-02-19T08:17:58.948458Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-02-19T08:17:58.949319Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2026-02-19T08:17:58.950210Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2026-02-19T08:17:58.974515Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2026-02-19T08:17:58.975457Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2026-02-19T08:17:58.975550Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2026-02-19T08:17:58.975802Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:17:58.979664Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2026-02-19T08:17:58.991441Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:58.991556Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:58.991641Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:17:58.991963Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:17:58.993139Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:17:58.993634Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:58.994491Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2026-02-19T08:17:58.995056Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2026-02-19T08:17:58.996377Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:58.996437Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:58.996477Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:17:58.996790Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:17:58.997246Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:17:58.997383Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:58.998037Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:17:58.998213Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:17:58.998310Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:17:58.998352Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-02-19T08:17:58.998740Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-02-19T08:17:46.388537Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-02-19T08:17:46.499739Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:46.499850Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:46.499916Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:46.499990Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-02-19T08:17:46.571927Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037927937] server connected, pipe [1:183:2195], now have 1 active actors on pipe 2026-02-19T08:17:46.572047Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:17:46.627287Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 
72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-02-19T08:17:46.627492Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:46.629292Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-02-19T08:17:46.629464Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:17:46.629558Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:17:46.630115Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:17:46.630568Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-02-19T08:17:46.633325Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:17:46.633394Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-02-19T08:17:46.633467Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-02-19T08:17:46.633533Z node 1 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:17:46.635689Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:17:46.637047Z node 1 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-02-19T08:17:46.637136Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:17:46.637187Z node 1 :PERSQUEUE DEBUG: partition.cpp:2438: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:46.637240Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:17:46.637277Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:46.637345Z node 1 :PERSQUEUE DEBUG: partition.cpp:2502: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:46.637418Z node 1 :PERSQUEUE DEBUG: partition.cpp:3799: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-02-19T08:17:46.637462Z node 1 :PERSQUEUE DEBUG: partition.cpp:3870: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-02-19T08:17:46.637500Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:17:46.637537Z node 1 :PERSQUEUE DEBUG: partition.cpp:2371: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:17:46.637610Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][0][StateIdle] Try persist 2026-02-19T08:17:46.638166Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-02-19T08:17:46.638230Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-02-19T08:17:46.638300Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:17:46.638517Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:17:46.638763Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-02-19T08:17:46.641095Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:17:46.641158Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 
2026-02-19T08:17:46.641194Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-02-19T08:17:46.641246Z node 1 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:17:46.644001Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:17:46.645141Z node 1 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-02-19T08:17:46.645187Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:17:46.645219Z node 1 :PERSQUEUE DEBUG: partition.cpp:2438: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:46.645261Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:17:46.645290Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:17:46.645364Z node 1 :PERSQUEUE DEBUG: partition.cpp:2502: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-02-19T08:17:46.645426Z node 1 :PERSQUEUE DEBUG: partition.cpp:3799: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-02-19T08:17:46.645470Z node 1 :PERSQUEUE DEBUG: partition.cpp:3870: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-02-19T08:17:46.645505Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:17:46.645533Z node 1 :PERSQUEUE DEBUG: partition.cpp:2371: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-02-19T08:17:46.645568Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][1][StateIdle] Try persist 2026-02-19T08:17:46.645782Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-02-19T08:17:46.645820Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-02-19T08:17:46.645884Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:17:46.646100Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-02-19T08:17:46.646268Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:17:46.646570Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write reques ... UE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2026-02-19T08:18:00.585806Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.585839Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.585859Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.585878Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.585896Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.585916Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.585934Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.585951Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.585968Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2026-02-19T08:18:00.586067Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-02-19T08:18:00.586092Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-02-19T08:18:00.586122Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:0 isTruncatedBlob 1 2026-02-19T08:18:00.607968Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:0 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-02-19T08:18:00.608099Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 3:16 2026-02-19T08:18:00.608451Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 28 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 3 partno 16 count 4294967295 size 4294967295 endOffset 4 max time lag 0ms effective offset 3 2026-02-19T08:18:00.608852Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 28 added 2 blobs, size 12781161 count 1 last offset 3, current partition end offset: 4 
2026-02-19T08:18:00.608897Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 28. Send blob request. 2026-02-19T08:18:00.608996Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 16 count 0 parts_count 16 source 0 size 8191635 accessed 1 times before, last time 1970-01-01T00:00:00.000000Z 2026-02-19T08:18:00.609041Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 32 count 1 parts_count 8 source 1 size 4589526 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2026-02-19T08:18:00.609091Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 28. All 2 blobs are from cache. 2026-02-19T08:18:00.609162Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 16 count 0 parts 16 suffix '0' 2026-02-19T08:18:00.609205Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 32 count 1 parts 8 suffix '0' 2026-02-19T08:18:00.609294Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-02-19T08:18:00.609423Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2026-02-19T08:18:00.609548Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.609578Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.609607Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.609637Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.609663Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.609692Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.609738Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.609768Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-02-19T08:18:00.609797Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2026-02-19T08:18:00.609914Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-02-19T08:18:00.609950Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-02-19T08:18:00.609987Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:16 isTruncatedBlob 1 2026-02-19T08:18:00.641441Z node 3 
:PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:16 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 1 2026-02-19T08:18:00.661011Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00032_0000000001_00015 2026-02-19T08:18:00.661162Z node 3 :PERSQUEUE DEBUG: partition.cpp:4549: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-02-19T08:18:00.661422Z node 3 :PERSQUEUE DEBUG: partition.cpp:4557: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-02-19T08:18:00.661486Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:18:00.661612Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 0 size 271 2026-02-19T08:18:00.661701Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 32 count 1 size 187 2026-02-19T08:18:00.661806Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 0 size 8191590 2026-02-19T08:18:00.661843Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000002_00016_0000000000_00016(+) to d0000000000_00000000000000000002_00016_0000000000_00016(+) 2026-02-19T08:18:00.661875Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00032_0000000001_00015(+) to d0000000000_00000000000000000000_00032_0000000001_00015(+) 2026-02-19T08:18:00.718587Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 0 size 271 actorID [3:138:2142] 2026-02-19T08:18:00.718654Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 2 count 1 size 4589526 actorID [3:138:2142] is actual 1 2026-02-19T08:18:00.718723Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 1 size 187 actorID [3:138:2142] 2026-02-19T08:18:00.718752Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 0 size 8191635 actorID [3:138:2142] is actual 1 2026-02-19T08:18:00.718824Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 0 size 8191590 actorID [3:138:2142] 2026-02-19T08:18:00.719020Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 4589526 2026-02-19T08:18:00.727362Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191635 2026-02-19T08:18:00.739960Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 0 count 0 parts 16 suffix '0' size 271 2026-02-19T08:18:00.740056Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 187 2026-02-19T08:18:00.740093Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191590 2026-02-19T08:18:00.740411Z node 3 :PERSQUEUE DEBUG: partition.cpp:2182: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:18:00.740462Z node 3 :PERSQUEUE DEBUG: partition.cpp:2190: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-02-19T08:18:00.740501Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2026-02-19T08:18:00.757771Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037927937] server connected, pipe [3:408:2382], now have 1 active actors on pipe 2026-02-19T08:18:00.757939Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-02-19T08:18:00.757994Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-02-19T08:18:00.758130Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 3 for user __ydb_compaction_consumer 2026-02-19T08:18:00.758549Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037927937] server connected, pipe [3:410:2384], now have 1 active actors on pipe Got start offset = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/ynks/003704/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk6 2026-02-19T08:17:47.723935Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487610690405204:2267];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:47.723992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:48.172348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:48.226589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:48.226679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:48.353824Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487610690404954:2081] 1771489067657245 != 1771489067657248 2026-02-19T08:17:48.404204Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:48.412915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:48.549847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:48.733935Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:48.826222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:48.826242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:48.826247Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:48.826314Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:49.810756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:49.827529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:49.959476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.304265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.519867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.628645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.726132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487610690405204:2267];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:52.726190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:53.403516Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487636460210612:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:53.403612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:53.403926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487636460210622:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:53.403964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:54.052774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:54.102347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:54.166512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:54.234876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:54.307980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:54.367010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:54.478488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:54.566197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:54.697739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487640755178795:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:54.697851Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:54.698305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487640755178800:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:54.698358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487640755178801:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:54.698465Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:54.702778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:54.721061Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487640755178804:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:17:54.784707Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487640755178857:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:42" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (DataType 'String)) (let $5 (OptionalType $4)) (let $6 (StructType '('"Key" $3) '('"Value" $5))) (let $7 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($18) (block '( (let $19 (lambda '($20) (block '( (let $21 (VariantType (TupleType $6 $6))) (let $22 (Variant $20 '0 $21)) (let $23 (Variant $20 '1 $21)) (return $22 $23) )))) (return (FromFlow (MultiMap (ToFlow $18) $19))) ))) '('('"_logical_id" '703) '('"_id" '"73cb0b5d-f7459df3-6ed7fd0f-3b4a4146")))) (let $8 (DqCnMap (TDqOutput $7 '0))) (let $9 (DqCnBroadcast (TDqOutput $7 '1))) (let $10 (StructType '('"t1.Key" $3) '('"t1.Value" $5) '('"t2.Key" $3) '('"t2.Value" $5))) (let $11 '('('"_logical_id" '618) '('"_id" '"93992d0b-8c06c616-9b518f5-30892cf9") '('"_wide_channels" $10))) (let $12 (DqPhyStage '($8 $9) (lambda '($24 $25) (block '( (let $26 '('Many 'Hashed 'Compact)) (let $27 (SqueezeToDict (FlatMap (ToFlow $25) (lambda '($30) (block '( (let $31 (Member $30 '"Value")) (let $32 (Nothing (OptionalType (TupleType $4 $6)))) (let $33 (IfPresent $31 (lambda '($34) (Just '($34 $30))) $32)) (return (If (Exists $31) $33 $32)) )))) (lambda '($35) (Nth $35 '0)) (lambda '($36) (Nth $36 '1)) $26)) (let $28 (Sort (FlatMap $27 (lambda '($37) (block '( (let $38 '('"Value")) (let $39 '('"Key" '"t1.Key" '"Value" '"t1.Value")) (let $40 '('"Key" '"t2.Key" '"Value" '"t2.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $24) (lambda '($41) (Exists (Member $41 '"Value")))) $37 'Inner $38 $38 $39 $40 '('"t1.Value") '('"t2.Value"))) )))) (Bool 'true) (lambda '($42) (Member $42 '"t1.Key")))) (let $29 (lambda '($43) (Member $43 '"t1.Key") (Member $43 '"t1.Value") (Member $43 '"t2.Key") (Member $43 '"t2.Value"))) (return (FromFlow (ExpandMap $28 $29))) ))) $11)) (let $13 (DqCnMerge (TDqOutput $12 '0) '('('0 '"Asc")))) (let $14 (DqPhyStage '($13) (lambda '($44) (FromFlow (NarrowMap (ToFlow $44) (lambda '($45 $46 $47 $48) (AsStruct '('"t1.Key" $45) '('"t1.Value" $46) '('"t2.Key" $47) '('"t2.Value" $48)))))) '('('"_logical_id" '630) '('"_id" '"244c2d32-bb0745af-e3be1b19-8595a41b")))) (let $15 '($7 $12 $14)) (let $16 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $17 (DqCnResult (TDqOutput $14 '0) $16)) (return (KqpPhysicalQuery '((KqpPhysicalTx $15 '($17) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $10) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:17:42.483662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:17:42.483755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:42.483851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:17:42.483895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:17:42.483936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:17:42.483973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:17:42.484007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:17:42.484037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:17:42.484127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:42.484208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:17:42.485162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:17:42.485506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:17:42.576141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:42.576198Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:42.593454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:17:42.596035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:17:42.596262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:17:42.611829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:17:42.612736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:17:42.613641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:42.614001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:17:42.618981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:42.619181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:17:42.620459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:17:42.620558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:42.620684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:17:42.620734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:17:42.620778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:17:42.620965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:17:42.773618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.775930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.776007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:42.776105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... Id [OwnerId: 72057594046678944, LocalPathId: 42] was 4 2026-02-19T08:18:00.775860Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2026-02-19T08:18:00.775913Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:18:00.782824Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [7:1461:3395], Recipient [7:128:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:18:00.782975Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:18:00.783013Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046678944 2026-02-19T08:18:00.783955Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269551620, Sender [7:1401:3343], Recipient [7:128:2152]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1401 RawX2: 30064774415 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-02-19T08:18:00.784015Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5330: StateWork, processing event TEvDataShard::TEvSchemaChanged 2026-02-19T08:18:00.784120Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1401 RawX2: 30064774415 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-02-19T08:18:00.784184Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2026-02-19T08:18:00.784374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1401 RawX2: 30064774415 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-02-19T08:18:00.784446Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:18:00.784583Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 114:0 HandleReply 
TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1401 RawX2: 30064774415 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-02-19T08:18:00.784676Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:00.784722Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2026-02-19T08:18:00.784768Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:18:00.784839Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 114:0 ProposedWaitParts -> Done 2026-02-19T08:18:00.785037Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:18:00.791992Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:18:00.798339Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2026-02-19T08:18:00.798433Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:18:00.799164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2026-02-19T08:18:00.799201Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:18:00.799337Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2026-02-19T08:18:00.799368Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:18:00.803037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2026-02-19T08:18:00.803108Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:18:00.803155Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 114:0 2026-02-19T08:18:00.803282Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1401:3343] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2026-02-19T08:18:00.803765Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [7:128:2152], Recipient [7:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:18:00.803811Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing 
event TEvPrivate::TEvProgressOperation 2026-02-19T08:18:00.803892Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2026-02-19T08:18:00.803941Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 114:0 ProgressState 2026-02-19T08:18:00.804068Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:18:00.804108Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 2026-02-19T08:18:00.804154Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-02-19T08:18:00.804198Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 2026-02-19T08:18:00.804265Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-02-19T08:18:00.804321Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2026-02-19T08:18:00.804407Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:798:2763] message: TxId: 114 2026-02-19T08:18:00.804477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-02-19T08:18:00.804523Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 114:0 2026-02-19T08:18:00.804567Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 114:0 2026-02-19T08:18:00.804718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-02-19T08:18:00.808947Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:18:00.809071Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:798:2763] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2026-02-19T08:18:00.809273Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-02-19T08:18:00.809350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1430:3364] 2026-02-19T08:18:00.809578Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [7:1432:3366], Recipient [7:128:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:18:00.809629Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:18:00.809657Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2026-02-19T08:18:00.810887Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [7:1472:3406], Recipient [7:128:2152]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2026-02-19T08:18:00.810971Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:18:00.813303Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:18:00.813581Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2026-02-19T08:18:00.814086Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2026-02-19T08:18:00.814328Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:18:00.817235Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:18:00.817532Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2026-02-19T08:18:00.817594Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |91.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |91.5%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber >> KqpRe2::IncorrectRegexWithoutExecutionNoError [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] >> KqpProxy::DatabasesCacheForServerless >> PhantomBlobs::TestDisabling [GOOD] >> PhantomBlobs::TestEnabling >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=noset >> 
ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=empty |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpRe2::IncorrectRegexWithoutExecutionNoError [GOOD] Test command err: 2026-02-19T08:17:46.324838Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487605570958954:2216];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:46.324889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:46.934351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:46.934461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:46.986685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:47.037884Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487605570958757:2081] 1771489066258544 != 1771489066258547 2026-02-19T08:17:47.076522Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:47.089539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:47.311277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:47.335413Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:47.458378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:47.458405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:47.458412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:47.458497Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:48.215971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:48.352988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:48.716854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.040216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.183369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.328066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487605570958954:2216];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:51.334806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:52.038424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487631340764420:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.038706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.039109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487631340764430:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.039159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.465632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.504581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.539927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.578063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.654494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.711780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.782769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.855371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:52.962026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487631340765307:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.962122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.962499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487631340765311:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.962556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.962780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487631340765315:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:52.969097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:52.984682Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487631340765317:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:17:53.092362Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487635635732664:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ( (let $1 (DataType 'Bool)) (let $2 '('('"_logical_id" '236) '('"_id" '"cf68491e-f037747a-3699601a-1e6dc8cd") '('"_partition_mode" '"single"))) (let $3 (DqPhyStage '() (lambda '() (block '( (let $5 (String '"a[x")) (let $6 (OptionalType (StructType '('"CaseSensitive" $1) '('"DotNl" $1) '('"Literal" $1) '('"LogErrors" $1) '('"LongestMatch" $1) '('"MaxMem" (DataType 'Uint64)) '('"NeverCapture" $1) '( ... 487648756142450:2081] 1771489076743520 != 1771489076743523 2026-02-19T08:17:56.923009Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:57.060489Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:57.118447Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:57.118472Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:57.118481Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:57.118572Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:57.628776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:57.641372Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:57.656835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:57.730905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:57.842524Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:57.912468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:58.001612Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.407082Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487665936013510:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.407182Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.407417Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487665936013520:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.407456Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.488533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.533267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.568864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.624139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.671665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.733113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.798209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.878692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:00.982576Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487665936014400:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.982685Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.985189Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487665936014405:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.985248Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487665936014406:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.985354Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:00.989981Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:01.009989Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487665936014409:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:18:01.086242Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487670230981756:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:01.745023Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608487648756142480:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:01.745108Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:42" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (DataType 'Uint64)) (let $4 (DataType 'String)) (let $5 (OptionalType $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($10) (FromFlow (Filter (ToFlow $10) (lambda '($11) (block '( (let $12 (DataType 'Bool)) (let $13 (OptionalType (StructType '('"CaseSensitive" $12) '('"DotNl" $12) '('"Literal" $12) '('"LogErrors" $12) '('"LongestMatch" $12) '('"MaxMem" $3) '('"NeverCapture" $12) '('"NeverNl" $12) '('"OneLine" $12) '('"PerlClasses" $12) '('"PosixSyntax" $12) '('"Utf8" $12) '('"WordBoundary" $12)))) (let $14 (CallableType '() '($12) '($5))) (let $15 (Udf '"Re2.Grep" '((String '"[") (Nothing $13)) (VoidType) '"" $14 (TupleType $4 $13) '"" '())) (return (Or (Coalesce (== (Member $11 '"Key") (Int32 '1)) (Bool 'false)) (Apply $15 (Member $11 '"Value")))) )))))) '('('"_logical_id" '494) '('"_id" '"2beb5bc-274f847c-41f108a3-474c508d")))) (let $7 (DqCnUnionAll (TDqOutput $6 '"0"))) (let $8 (DqPhyStage '($7) (lambda '($16) $16) '('('"_logical_id" '578) '('"_id" '"252a38dc-5a76c913-936dfa1b-2097e29b")))) (let $9 (DqCnResult (TDqOutput $8 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($6 $8) '($9) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"Key" (OptionalType $3)) '('"Value" $5))) '"0" '"0")) '('('"type" '"query")))) ) >> ReadSessionImplTest::ForcefulDestroyPartitionStream >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> HashShuffle::Compatibility+ShuffleElimination+HashV2 [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=empty 
[GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=foo ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2026-02-19T08:18:06.580816Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.580852Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.580878Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.581326Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-02-19T08:18:06.581378Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.581422Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.582526Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008051s 2026-02-19T08:18:06.583102Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.583860Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:18:06.583927Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.585604Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.585627Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.585655Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.586047Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-02-19T08:18:06.586098Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.586123Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.586198Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007443s 2026-02-19T08:18:06.587514Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.587929Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:18:06.588121Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.589019Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.589041Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.589060Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.589518Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-02-19T08:18:06.589554Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.589580Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.589643Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.175705s 2026-02-19T08:18:06.590476Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.594259Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:18:06.594367Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.595896Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.595918Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.595940Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.596350Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-02-19T08:18:06.596405Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.596437Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.596504Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.209289s 2026-02-19T08:18:06.597195Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.597687Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:18:06.597805Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.604286Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.604314Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.604332Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.604667Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.605134Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.618486Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.619767Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2026-02-19T08:18:06.619807Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.619831Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.619892Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.175840s 2026-02-19T08:18:06.620095Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-02-19T08:18:06.621679Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.621728Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.621755Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.622138Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.622697Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.622906Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.624876Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:06.724426Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.726010Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:18:06.726120Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:06.726180Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-02-19T08:18:06.726271Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-02-19T08:18:06.828376Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-02-19T08:18:06.828597Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-02-19T08:18:06.829906Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.829931Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.829952Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.830298Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.836072Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.836314Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.842493Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:06.944940Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.945163Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:18:06.945228Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:06.945290Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2026-02-19T08:18:06.945358Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-02-19T08:18:06.945447Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-02-19T08:18:06.947802Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-02-19T08:18:06.947910Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-02-19T08:18:06.952345Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2026-02-19T08:18:06.508264Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.508306Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.508356Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.508735Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.509267Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:18:06.509340Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.510282Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.510306Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.510348Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.517060Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.517671Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:18:06.517747Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.518798Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.519221Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.519258Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.519694Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-02-19T08:18:06.519743Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.519769Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.519931Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2026-02-19T08:18:06.520798Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.520850Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.520867Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.521246Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-02-19T08:18:06.521284Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.521312Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.521371Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2026-02-19T08:18:06.522392Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-02-19T08:18:06.522422Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-02-19T08:18:06.522491Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.522792Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.523281Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.535867Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-02-19T08:18:06.536654Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:06.537041Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2026-02-19T08:18:06.540665Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2026-02-19T08:18:06.540941Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:06.540991Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:18:06.541021Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:18:06.541040Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-02-19T08:18:06.541070Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-02-19T08:18:06.541086Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-02-19T08:18:06.541116Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2026-02-19T08:18:06.541141Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2026-02-19T08:18:06.541172Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2026-02-19T08:18:06.541189Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2026-02-19T08:18:06.541206Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2026-02-19T08:18:06.541222Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2026-02-19T08:18:06.541238Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2026-02-19T08:18:06.541254Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2026-02-19T08:18:06.541271Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2026-02-19T08:18:06.541306Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2026-02-19T08:18:06.541371Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2026-02-19T08:18:06.541390Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2026-02-19T08:18:06.541407Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2026-02-19T08:18:06.541424Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2026-02-19T08:18:06.541440Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2026-02-19T08:18:06.541467Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2026-02-19T08:18:06.541489Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2026-02-19T08:18:06.541504Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2026-02-19T08:18:06.541524Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2026-02-19T08:18:06.541541Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2026-02-19T08:18:06.541557Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2026-02-19T08:18:06.541574Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2026-02-19T08:18:06.541590Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2026-02-19T08:18:06.541615Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2026-02-19T08:18:06.541641Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2026-02-19T08:18:06.541658Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2026-02-19T08:18:06.541735Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2026-02-19T08:18:06.541755Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2026-02-19T08:18:06.541773Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2026-02-19T08:18:06.541793Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2026-02-19T08:18:06.541810Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2026-02-19T08:18:06.541827Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2026-02-19T08:18:06.541856Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2026-02-19T08:18:06.541881Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2026-02-19T08:18:06.541913Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2026-02-19T08:18:06.541987Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2026-02-19T08:18:06.542003Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2026-02-19T08:18:06.542020Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2026-02-19T08:18:06.542038Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2026-02-19T08:18:06.542054Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2026-02-19T08:18:06.542070Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2026-02-19T08:18:06.542102Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2026-02-19T08:18:06.542122Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2026-02-19T08:18:06.542154Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2026-02-19T08:18:06.542238Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-02-19T08:18:06.544978Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2026-02-19T08:18:06.545313Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2026-02-19T08:18:06.545353Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2026-02-19T08:18:06.545374Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2026-02-19T08:18:06.545402Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2026-02-19T08:18:06.545436Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2026-02-19T08:18:06.545459Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2026-02-19T08:18:06.545491Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2026-02-19T08:18:06.545507Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2026-02-19T08:18:06.545553Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2026-02-19T08:18:06.545581Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2026-02-19T08:18:06.545605Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2026-02-19T08:18:06.545622Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2026-02-19T08:18:06.545638Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2026-02-19T08:18:06.545654Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2026-02-19T08:18:06.545670Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2026-02-19T08:18:06.545685Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2026-02-19T08:18:06.545752Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2026-02-19T08:18:06.545778Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2026-02-19T08:18:06.545794Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2026-02-19T08:18:06.545820Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2026-02-19T08:18:06.545846Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2026-02-19T08:18:06.545863Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2026-02-19T08:18:06.545895Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2026-02-19T08:18:06.545925Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2026-02-19T08:18:06.545945Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2026-02-19T08:18:06.545960Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2026-02-19T08:18:06.545977Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2026-02-19T08:18:06.545993Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2026-02-19T08:18:06.546010Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2026-02-19T08:18:06.546026Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2026-02-19T08:18:06.546042Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2026-02-19T08:18:06.546058Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2026-02-19T08:18:06.546125Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2026-02-19T08:18:06.546151Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2026-02-19T08:18:06.546179Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2026-02-19T08:18:06.546211Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2026-02-19T08:18:06.546233Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2026-02-19T08:18:06.546254Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2026-02-19T08:18:06.546288Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2026-02-19T08:18:06.546303Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2026-02-19T08:18:06.546320Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2026-02-19T08:18:06.546339Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2026-02-19T08:18:06.546359Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2026-02-19T08:18:06.546375Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2026-02-19T08:18:06.546390Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2026-02-19T08:18:06.546404Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2026-02-19T08:18:06.546419Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2026-02-19T08:18:06.546436Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2026-02-19T08:18:06.546457Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2026-02-19T08:18:06.546473Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2026-02-19T08:18:06.546525Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-02-19T08:18:06.546689Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-02-19T08:18:06.548383Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.548420Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.548447Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.548900Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-02-19T08:18:06.549379Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.549601Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.550059Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:06.653388Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.653638Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:18:06.653699Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:06.653770Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-02-19T08:18:06.653849Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-02-19T08:18:06.856936Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-02-19T08:18:06.958462Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-02-19T08:18:06.960364Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-02-19T08:18:06.960781Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-02-19T08:18:06.962010Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.962033Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.962054Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.963350Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.966603Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.966802Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.967448Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:07.070670Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:07.071141Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:18:07.071206Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:07.071248Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-02-19T08:18:07.071326Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-02-19T08:18:07.071436Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-02-19T08:18:07.071627Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-02-19T08:18:07.071709Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-02-19T08:18:07.071836Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2026-02-19T08:18:06.931715Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.931775Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.931804Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.934005Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.934573Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.958499Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.958989Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:06.962646Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.962670Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.962693Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.966175Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.966725Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.966933Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.967352Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:06.967641Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-02-19T08:18:06.968769Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.968791Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.968810Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.969370Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.970262Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.970388Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.970563Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:06.971222Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.971517Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:06.971622Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-02-19T08:18:06.971675Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-02-19T08:18:06.972752Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.972781Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.972808Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.973166Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.973680Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.973844Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.974082Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2026-02-19T08:18:06.974994Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:18:06.975229Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-02-19T08:18:06.980094Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-02-19T08:18:06.980336Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-02-19T08:18:06.980691Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:06.980730Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:18:06.980767Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:18:06.980933Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2026-02-19T08:18:06.981043Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:18:06.981068Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-02-19T08:18:06.981088Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:18:06.981217Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2026-02-19T08:18:06.981278Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-02-19T08:18:06.981299Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-02-19T08:18:06.981319Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:18:06.981387Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2026-02-19T08:18:06.981428Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-02-19T08:18:06.981447Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-02-19T08:18:06.981465Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:18:06.981555Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2026-02-19T08:18:06.983258Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.983291Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.983322Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.983627Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-02-19T08:18:06.984101Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.984281Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.986372Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-02-19T08:18:06.987337Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:18:06.987528Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-02-19T08:18:06.987851Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-02-19T08:18:06.988083Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-02-19T08:18:06.988268Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:06.988298Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:18:06.988367Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:18:06.988384Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-02-19T08:18:06.988420Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:18:06.988639Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2026-02-19T08:18:06.988754Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-02-19T08:18:06.988780Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-02-19T08:18:06.988818Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-02-19T08:18:06.988850Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-02-19T08:18:06.988880Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:18:06.989027Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2026-02-19T08:18:06.990497Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.990550Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.990576Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:06.990850Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:06.991273Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:06.991435Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:06.992584Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:06.993690Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:18:06.994526Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:18:07.000629Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2026-02-19T08:18:07.000773Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:18:07.000903Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:07.000961Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:18:07.000988Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2026-02-19T08:18:07.001006Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2026-02-19T08:18:07.001051Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 2, size 16 bytes 2026-02-19T08:18:07.001075Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-02-19T08:18:07.001291Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2026-02-19T08:18:07.001464Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=foo [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> HashShuffle::Compatibility+ShuffleElimination+HashV2 [GOOD] Test command err: 2026-02-19T08:17:44.161099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487596052446567:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:44.161499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:44.524958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:44.572650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:44.572786Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:44.608300Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:44.628291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:44.811693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:44.922277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:44.922310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:44.922322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:44.922416Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:45.174027Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:45.668915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:45.681189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:17:45.748117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:45.966719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.252843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.457265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:48.955899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7608487613232317492:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:48.956017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:48.956961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487613232317503:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:48.957017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:49.157810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487596052446567:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:49.157881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:49.562552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.598620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.659246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.710271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.756519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.810222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.869059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.920282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.011595Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621822252972:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.011674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.011853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621822252977:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.011885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621822252978:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.011981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.015762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:50.029200Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487621822252981:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:17:50.132552Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487621822253032:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:51.861958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:17:51.976811Z node 1 :TX_COLUMN ... 841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:18:03.033167Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:18:03.033186Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:18:03.033248Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:18:03.033278Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:18:03.033310Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:18:03.033327Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:18:03.033369Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:18:03.033387Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:18:03.033518Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:18:03.033541Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:18:03.034297Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:18:03.034344Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:18:03.034390Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:18:03.034435Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:18:03.034472Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:18:03.034502Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:18:03.034620Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:18:03.034652Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-02-19T08:18:03.034748Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-02-19T08:18:03.034784Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-02-19T08:18:03.040730Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;self_id=[2:7608487673303824599:2574];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976715674;this=136530019272608;method=TTxController::StartProposeOnExecute;tx_info=281474976715674:TX_KIND_SCHEMA;min=1771489083040;max=18446744073709551615;plan=0;src=[2:7608487647534017766:2154];cookie=462:14;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.044274Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.044273Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.044326Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.044326Z 
node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.044343Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.044353Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.071625Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.071698Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.071716Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.073404Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.073483Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:03.073500Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; ( (let $1 (lambda '($34 $35) $34 $35)) (let $2 (DataType 'Uint32)) (let $3 '('"k2" (OptionalType $2))) (let $4 '('('"_logical_id" '1995) '('"_id" '"bafc8185-ac08d8c4-222b686-dff8c693") '('"_wide_channels" (StructType '('"k1" $2) $3)))) (let $5 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/Table1" '"72057594046644480:54" '"" '1)) (let $29 (KqpBlockReadOlapTableRanges $28 (Void) '('"k1" '"k2") '() '() (lambda '($33) $33))) (let $30 (lambda '($36 $37 $38 $39) $36 $37)) (let $31 (lambda '($40 $41 $42 $43 $44 $45) $44 $45)) (let $32 (lambda '($46 $47 $48 $49) $48 $49)) (return (FromFlow (DqPhyHashCombine (ToFlow (WideFromBlocks (FromFlow $29))) '-1073741824 $1 $30 $31 $32))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0 '1) '0 '"HashV2")) (let $7 '('('"_logical_id" '2341) '('"_id" '"10d16d35-ae472557-9b8eeec4-8bb3a13c") '('"_wide_channels" (StructType $3)))) (let $8 (DqPhyStage '($6) (lambda '($50) (block '( (let $51 (lambda '($52 $53 $54 $55))) (return (FromFlow (DqPhyHashCombine (ToFlow $50) '"" $1 $51 $51 (lambda '($56 $57) $57)))) ))) $7)) (let $9 '('('"_logical_id" '3387) '('"_id" '"20396929-aa856170-93daabf5-6b6f9b94") '('"_wide_channels" (StructType $3 '('"k3" $2))))) (let $10 (DqPhyStage '() (lambda '() (block '( (let $58 (KqpTable 
'"/Root/Table2" '"72057594046644480:55" '"" '1)) (let $59 (KqpBlockReadOlapTableRanges $58 (Void) '('"k2" '"k3") '() '() (lambda '($60) $60))) (return (FromFlow $59)) ))) $9)) (let $11 '('0)) (let $12 (DqCnHashShuffle (TDqOutput $8 '0) $11 '1 '"HashV2")) (let $13 (DqCnHashShuffle (TDqOutput $10 '0) $11 '1 '"HashV2")) (let $14 (DataType 'Uint64)) (let $15 (OptionalType $14)) (let $16 (Bool 'false)) (let $17 (lambda '($75) $75)) (let $18 '('('"_logical_id" '2650) '('"_id" '"229520d4-9b5ae4fb-6b85d39f-8cc4f1cd") '('"_wide_channels" (StructType '('_yql_agg_0 $15))))) (let $19 (DqPhyStage '($12 $13) (lambda '($61 $62) (block '( (let $63 (GraceJoinCore (ToFlow $61) (ToFlow (WideFromBlocks $62)) '"Left" $11 $11 '() '('1 '0) '('"t1.k2") '('"t2.k2") '())) (let $64 (lambda '($69) (Just (SafeCast $69 $14)))) (let $65 (Nothing $15)) (let $66 (Condense1 (NarrowMap $63 (lambda '($67) (AsStruct '('"t2.k3" $67)))) (lambda '($68) (IfPresent (Member $68 '"t2.k3") $64 $65)) (lambda '($70 $71) $16) (lambda '($72 $73) (block '( (let $74 (IfPresent (Member $72 '"t2.k3") $64 $65)) (return (AggrAdd $74 $73)) ))))) (return (FromFlow (ExpandMap $66 $17))) ))) $18)) (let $20 (DqCnUnionAll (TDqOutput $19 '0))) (let $21 (StructType '('Sum0 $15))) (let $22 '('('"_logical_id" '3155) '('"_id" '"8c7b375c-601e9549-f514fc46-235cefaf") '('"_partition_mode" '"aggregate") '('"_wide_channels" $21))) (let $23 (DqPhyStage '($20) (lambda '($76) (FromFlow (WideCondense1 (ToFlow $76) $17 (lambda '($77 $78) $16) (lambda '($79 $80) (AggrAdd $79 $80))))) $22)) (let $24 (DqCnUnionAll (TDqOutput $23 '0))) (let $25 (DqPhyStage '($24) (lambda '($81) (block '( (let $82 (Condense (NarrowMap (ToFlow $81) (lambda '($83) (AsStruct '('Sum0 $83)))) (List (ListType $21)) (lambda '($84 $85) $16) (lambda '($86 $87) (AsList $86)))) (return (FromFlow (Map $82 (lambda '($88) (AsStruct '('"column0" (Member (ToOptional $88) 'Sum0))))))) ))) '('('"_logical_id" '3205) '('"_id" '"d449d762-b0d08d0-baf59ef1-c70a8700")))) (let $26 '($5 $8 $10 $19 $23 $25)) (let $27 (DqCnResult (TDqOutput $25 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $26 '($27) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $15))) '0 '0)) '('('"type" '"data_query")))) ) >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> Sharding::XXUsage >> Sharding::XXUsage [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=foo [GOOD] Test command err: 2026-02-19T08:17:09.172401Z node 1 :TABLET_RESOLVER INFO: 
tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:135:2057] recipient: [1:133:2165] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:135:2057] recipient: [1:133:2165] Leader for TabletID 72057594037927937 is [1:139:2169] sender: [1:140:2057] recipient: [1:133:2165] 2026-02-19T08:17:09.251730Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:09.251810Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:09.251867Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:09.251919Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:181:2057] recipient: [1:179:2199] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:181:2057] recipient: [1:179:2199] Leader for TabletID 72057594037927938 is [1:185:2203] sender: [1:186:2057] recipient: [1:179:2199] Leader for TabletID 72057594037927937 is [1:139:2169] sender: [1:211:2057] recipient: [1:14:2061] 2026-02-19T08:17:09.272938Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:09.293404Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 1 actor [1:209:2221] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-02-19T08:17:09.294566Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:217:2169] 2026-02-19T08:17:09.297391Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:217:2169] 2026-02-19T08:17:09.299556Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:218:2169] 2026-02-19T08:17:09.301442Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:218:2169] 2026-02-19T08:17:09.308859Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:09.309302Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8f3483d2-8120f0a7-cfc8f202-ffe8ce76_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:09.317323Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: 
Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:09.317813Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b78cbd4e-c2a9a55f-f39f8290-93728ec4_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:09.324032Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:09.324433Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1e73e3-b22a25e7-caa4c3b3-4e2b40ca_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 2026-02-19T08:17:09.341067Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.373945Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.406560Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.451536Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.462132Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.513790Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.620985Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.631580Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.755399Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.850287Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 
localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:09.976860Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.066368Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:10.295219Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/InFlightLimitReachedDurationMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", 
"sensor": "PQ/ReadBytesPerSec" }, "val ... version 16 actor [8:206:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 16 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 15 } Consumers { Name: "user1" Generation: 15 Important: false MetricsLevel: 3 MonitoringProjectId: "" } MetricsLevel: 3 MonitoringProjectId: "bar" XXXXX after enable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Set pipe for create session: [8:302:2295] 2026-02-19T08:18:07.441058Z node 8 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [8:301:2294] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 302 RawX2: 34359740663 } Cookie: 123 } via pipe: [8:206:2219] Set pipe for create session: [8:314:2304] 2026-02-19T08:18:07.449067Z node 8 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [8:313:2303] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 314 RawX2: 34359740672 } Cookie: 123 } via pipe: [8:206:2219] XXXXX after read: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null 2026-02-19T08:18:07.458176Z node 8 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:07.464307Z node 8 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 17 actor [8:206:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 17 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 15 } Consumers { Name: "user1" Generation: 15 Important: false MetricsLevel: 2 MonitoringProjectId: "" } MetricsLevel: 3 MonitoringProjectId: "bar" XXXXX after disable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Run PartitionLevelMetrics(EnableMetricsLevel=0, FirstClassCitizen=0, PartitionMetricsLevel=3, PartitionMonitoringProjectId=bar, ConsumersMonitoringProjectId=foo) 2026-02-19T08:18:08.125703Z node 9 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 9 selfDC: leaderDC: resolveFlags: 
1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:18:08.196609Z node 9 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:18:08.196684Z node 9 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:18:08.196733Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:08.196807Z node 9 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:18:08.223862Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:08.224961Z node 9 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 18 actor [9:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 18 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 18 } Consumers { Name: "user1" Generation: 18 Important: false MetricsLevel: 2 MonitoringProjectId: "foo" } MetricsLevel: 3 MonitoringProjectId: "bar" 2026-02-19T08:18:08.225796Z node 9 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [9:215:2167] 2026-02-19T08:18:08.228732Z node 9 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [9:215:2167] 2026-02-19T08:18:08.246831Z node 9 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [9:216:2167] 2026-02-19T08:18:08.249300Z node 9 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [9:216:2167] 2026-02-19T08:18:08.261563Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:08.262031Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e6a25f8e-ed7c6220-4f4302aa-77672b3e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:18:08.271834Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:08.272242Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5e229f1a-dd006f35-8637feb7-245ce963_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:18:08.282580Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:08.283005Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 
default|3f61bfa7-1a5dca84-8ba7c0ac-9632158a_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:18:08.290029Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:08.290423Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|35e11429-76f543ca-a395bc99-c0430c54_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:18:08.298497Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:08.298917Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|aa686d09-ab022e15-954ec184-737de899_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 XXXXX before enable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null 2026-02-19T08:18:08.309285Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:08.314465Z node 9 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 19 actor [9:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 19 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 18 } Consumers { Name: "user1" Generation: 18 Important: false MetricsLevel: 3 MonitoringProjectId: "foo" } MetricsLevel: 3 MonitoringProjectId: "bar" XXXXX after enable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Set pipe for create session: [9:299:2291] 2026-02-19T08:18:08.316935Z node 9 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [9:298:2290] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 299 RawX2: 38654707955 } Cookie: 123 } via pipe: [9:207:2219] Set pipe for create session: [9:311:2300] 2026-02-19T08:18:08.321734Z node 9 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [9:310:2299] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 311 RawX2: 38654707964 } Cookie: 123 } via pipe: [9:207:2219] XXXXX after read: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null 2026-02-19T08:18:08.338359Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:08.352089Z node 9 
:PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 20 actor [9:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 20 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 18 } Consumers { Name: "user1" Generation: 18 Important: false MetricsLevel: 2 MonitoringProjectId: "foo" } MetricsLevel: 3 MonitoringProjectId: "bar" XXXXX after disable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null >> HashShuffle::Compatibility-ShuffleElimination+HashV2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 3389491583452315406 10009834854617362745 16944011876818050635 2501220907089885140 14262969028439629129 3282373532517996887 4271479993968909220 11261052584754007566 7763448586921507039 4682259816225212021 16905571065975338505 17779994018988680381 4788154734789422967 4224449371641278089 2187003720254958852 4989818167486237032 10634665616666123632 12888203850303828292 13824930818029451247 893748498062442708 2744517546871237796 13283334592077642889 927365455909684831 10771244154444010424 1785541634243073000 2485457497293816590 2109291512807433047 11194384969982365492 12386532695798351271 3052850129890481217 8747974526797068806 6828096614062889918 452923057603893589 11031025607239830114 13395854745187631974 8660981324289587265 2718208116668658361 9369533476188580741 8710707170561444800 2690687813278274744 6892232881013157295 11602138720135576500 17405857936185259817 784626198515687307 16065628214833530982 9326286631325415334 5441332668451507817 5675821272302904796 2566934443557476307 10912298605002848391 10206759138305689309 13414401865036601163 2388761344962599187 16272275544607149820 1635232724399698660 2133841427684959510 7129073962076290100 3673806877801344649 43016450253398912 9629687526838183166 15964451272737338354 4116904983033342707 7354032058032990690 7992271034300552714 5255630886422489809 8467111723801892369 5794464233979320057 10154307134411129641 8524951281943717539 18299122191213162874 15223498947130415252 17034486030239102267 1918671090556507844 17197917097729961773 16645621728811374538 16765045863839526725 1486081757917540987 4123968283181716784 3352832161797841492 10812471125494860671 6986778636075728151 3557033728405440270 17567991550192589688 242311660143200014 8618868602649594930 17565070951968084609 11495362675290897512 17907234209718955272 17233876590288577653 7981930966436098619 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityZeroTimeout >> 
KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink |91.5%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestSqsTopicHttpProxy::TestSetQueueAttributesMultiple >> TableCreation::UpdateTableAcl [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> HashShuffle::Compatibility-ShuffleElimination+HashV2 [GOOD] Test command err: 2026-02-19T08:17:45.142259Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487601849598775:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:45.143395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:45.569905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:45.648368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:45.648456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:45.718558Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:45.729731Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487601849598743:2081] 1771489065100421 != 1771489065100424 2026-02-19T08:17:45.758850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:45.765150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:45.929441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:45.929464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:45.929471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:45.929565Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:46.164170Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:46.629343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:46.643194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:17:46.721014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.913467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:47.152521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:47.286343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.855763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487619029469808:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:49.855870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:49.856180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487619029469818:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:49.856241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.134107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487601849598775:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:50.134194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:50.213643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.262065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.308874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.359812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.402447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.480400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.530490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.638470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.795813Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487623324437992:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.795905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.796624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487623324437998:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.796686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487623324437997:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.796719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.800541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:50.814245Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487623324438001:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:17:50.887982Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487623324438054:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:52.864473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474 ... :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:18:05.734045Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:18:05.734076Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:18:05.734178Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:18:05.734213Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:18:05.734253Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:18:05.734280Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:18:05.734316Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:18:05.734342Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:18:05.734515Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:18:05.734551Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:18:05.734652Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:18:05.734678Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:18:05.734719Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:18:05.734750Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:18:05.734784Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:18:05.734810Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:18:05.734934Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:18:05.734961Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-02-19T08:18:05.735043Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-02-19T08:18:05.735070Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-02-19T08:18:05.742708Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[2:7608487688478950603:2574];ev=NActors::IEventHandle;tablet_id=72075186224037934;tx_id=281474976710674;this=136859908122464;method=TTxController::StartProposeOnExecute;tx_info=281474976710674:TX_KIND_SCHEMA;min=1771489085742;max=18446744073709551615;plan=0;src=[2:7608487658414176493:2156];cookie=472:14;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.754054Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.754054Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.754109Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.754116Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 
2026-02-19T08:18:05.754129Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.754132Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.771088Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.771148Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.771166Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.773140Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.773174Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; 2026-02-19T08:18:05.773185Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=55;result=not_found; ( (let $1 (lambda '($34 $35) $34 $35)) (let $2 (DataType 'Uint32)) (let $3 '('"k2" (OptionalType $2))) (let $4 '('('"_logical_id" '1995) '('"_id" '"4fb7184f-bb9b20bd-694bfdc6-237e4d0a") '('"_wide_channels" (StructType '('"k1" $2) $3)))) (let $5 (DqPhyStage '() (lambda '() (block '( (let $28 (KqpTable '"/Root/Table1" '"72057594046644480:54" '"" '1)) (let $29 (KqpBlockReadOlapTableRanges $28 (Void) '('"k1" '"k2") '() '() (lambda '($33) $33))) (let $30 (lambda '($36 $37 $38 $39) $36 $37)) (let $31 (lambda '($40 $41 $42 $43 $44 $45) $44 $45)) (let $32 (lambda '($46 $47 $48 $49) $48 $49)) (return (FromFlow (DqPhyHashCombine (ToFlow (WideFromBlocks (FromFlow $29))) '-1073741824 $1 $30 $31 $32))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0 '1))) (let $7 '('('"_logical_id" '2341) '('"_id" '"f0118a5a-cf8f40f9-eecb5471-eb1ccf54") '('"_wide_channels" (StructType $3)))) (let $8 (DqPhyStage '($6) (lambda '($50) (block '( (let $51 (lambda '($52 $53 $54 $55))) (return (FromFlow (DqPhyHashCombine (ToFlow $50) '"" $1 $51 $51 (lambda '($56 $57) $57)))) ))) $7)) (let $9 '('('"_logical_id" '3385) '('"_id" '"5e9169d9-97d0cb80-d7bc66a6-1b47b1ee") '('"_wide_channels" (StructType $3 '('"k3" $2))))) (let $10 (DqPhyStage '() (lambda '() (block '( (let $58 (KqpTable '"/Root/Table2" '"72057594046644480:55" '"" '1)) (let $59 (KqpBlockReadOlapTableRanges $58 (Void) '('"k2" '"k3") '() '() (lambda '($60) $60))) (return (FromFlow $59)) ))) $9)) (let $11 '('0)) (let $12 (DqCnHashShuffle (TDqOutput $8 '0) $11 
'1)) (let $13 (DqCnHashShuffle (TDqOutput $10 '0) $11 '1)) (let $14 (DataType 'Uint64)) (let $15 (OptionalType $14)) (let $16 (Bool 'false)) (let $17 (lambda '($75) $75)) (let $18 '('('"_logical_id" '2648) '('"_id" '"3ecad3e5-ed5aac9e-6fab73f3-9bf199d4") '('"_wide_channels" (StructType '('_yql_agg_0 $15))))) (let $19 (DqPhyStage '($12 $13) (lambda '($61 $62) (block '( (let $63 (GraceJoinCore (ToFlow $61) (ToFlow (WideFromBlocks $62)) '"Left" $11 $11 '() '('1 '0) '('"t1.k2") '('"t2.k2") '())) (let $64 (lambda '($69) (Just (SafeCast $69 $14)))) (let $65 (Nothing $15)) (let $66 (Condense1 (NarrowMap $63 (lambda '($67) (AsStruct '('"t2.k3" $67)))) (lambda '($68) (IfPresent (Member $68 '"t2.k3") $64 $65)) (lambda '($70 $71) $16) (lambda '($72 $73) (block '( (let $74 (IfPresent (Member $72 '"t2.k3") $64 $65)) (return (AggrAdd $74 $73)) ))))) (return (FromFlow (ExpandMap $66 $17))) ))) $18)) (let $20 (DqCnUnionAll (TDqOutput $19 '0))) (let $21 (StructType '('Sum0 $15))) (let $22 '('('"_logical_id" '3153) '('"_id" '"4b5264e8-1c122444-f085a3c0-ac280536") '('"_partition_mode" '"aggregate") '('"_wide_channels" $21))) (let $23 (DqPhyStage '($20) (lambda '($76) (FromFlow (WideCondense1 (ToFlow $76) $17 (lambda '($77 $78) $16) (lambda '($79 $80) (AggrAdd $79 $80))))) $22)) (let $24 (DqCnUnionAll (TDqOutput $23 '0))) (let $25 (DqPhyStage '($24) (lambda '($81) (block '( (let $82 (Condense (NarrowMap (ToFlow $81) (lambda '($83) (AsStruct '('Sum0 $83)))) (List (ListType $21)) (lambda '($84 $85) $16) (lambda '($86 $87) (AsList $86)))) (return (FromFlow (Map $82 (lambda '($88) (AsStruct '('"column0" (Member (ToOptional $88) 'Sum0))))))) ))) '('('"_logical_id" '3203) '('"_id" '"65d38832-d73f267e-9ccf0bf5-15ef669f")))) (let $26 '($5 $8 $10 $19 $23 $25)) (let $27 (DqCnResult (TDqOutput $25 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $26 '($27) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $15))) '0 '0)) '('('"type" '"data_query")))) ) >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid >> TestSqsTopicHttpProxy::TestSendMessageTooBig >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TestSqsTopicHttpProxy::TestDeleteMessage >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::UpdateTableAcl [GOOD] Test command err: 2026-02-19T08:17:40.213136Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487579183213612:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:40.213185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:40.408591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:40.408666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:40.414473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:40.515435Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487579183213348:2081] 
1771489060158005 != 1771489060158008 2026-02-19T08:17:40.676195Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:40.962519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:40.962537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:40.962544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:40.962641Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:41.033697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:41.041971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:41.205856Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:44.259814Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:17:44.270240Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:17:44.270284Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:17:44.270302Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:17:44.272507Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Describe result: PathErrorUnknown 2026-02-19T08:17:44.272518Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Creating table 2026-02-19T08:17:44.272570Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:17:44.272686Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7608487596363083401:2499] Owner: [1:7608487596363083399:2497]. Describe result: PathErrorUnknown 2026-02-19T08:17:44.272691Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7608487596363083401:2499] Owner: [1:7608487596363083399:2497]. Creating table 2026-02-19T08:17:44.272707Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608487596363083401:2499] Owner: [1:7608487596363083399:2497]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:17:44.272727Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. 
Describe result: PathErrorUnknown 2026-02-19T08:17:44.272731Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. Creating table 2026-02-19T08:17:44.272739Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:17:44.286422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:44.288940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:44.293952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:44.301270Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-02-19T08:17:44.305919Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608487596363083401:2499] Owner: [1:7608487596363083399:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-02-19T08:17:44.305987Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7608487596363083401:2499] Owner: [1:7608487596363083399:2497]. Subscribe on create table tx: 281474976710659 2026-02-19T08:17:44.307907Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-02-19T08:17:44.307934Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. Subscribe on create table tx: 281474976710660 2026-02-19T08:17:44.310059Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Subscribe on create table tx: 281474976710658 2026-02-19T08:17:44.312501Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7608487596363083401:2499] Owner: [1:7608487596363083399:2497]. 
Subscribe on tx: 281474976710659 registered 2026-02-19T08:17:44.314757Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. Subscribe on tx: 281474976710660 registered 2026-02-19T08:17:44.314785Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Subscribe on tx: 281474976710658 registered 2026-02-19T08:17:44.449121Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-02-19T08:17:44.501821Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-02-19T08:17:44.514686Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7608487596363083401:2499] Owner: [1:7608487596363083399:2497]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-02-19T08:17:44.546109Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Table already exists, number of columns: 34, has SecurityObject: true 2026-02-19T08:17:44.546222Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Column diff is empty, finishing 2026-02-19T08:17:44.547292Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:17:44.548396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:17:44.549954Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:17:44.549969Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_executions updater. SelfId: [1:7608487596363083400:2498] Owner: [1:7608487596363083399:2497]. Successful alter request: ExecComplete 2026-02-19T08:17:44.562040Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. Table already exists, number of columns: 7, has SecurityObject: true 2026-02-19T08:17:44.562083Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. Column diff is empty, finishing 2026-02-19T08:17:44.562169Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608487596363083402:2500] Owner: [1:7608487596363083399:2497]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:17:44.563107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:17:44.56478 ... (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-02-19T08:18:08.678110Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=MzYyYTYyZGMtYmY0MjU2YmUtOGExMmUzZi1lMGJlOTE3Yw==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7608487702361850107:2529] 2026-02-19T08:18:08.678161Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 42 timeout: 300.000000s actor id: [3:7608487702361850109:2928] 2026-02-19T08:18:08.689625Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 42, sender: [3:7608487702361850108:2530], selfId: [3:7608487672297077555:2270], source: [3:7608487702361850107:2529] 2026-02-19T08:18:08.696834Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608487702361850104:2926], ActorId: [3:7608487702361850105:2927], TraceId: ExecutionId: d9dfe33d-c4297171-9bb13306-7479930e, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=MzYyYTYyZGMtYmY0MjU2YmUtOGExMmUzZi1lMGJlOTE3Yw==, TxId: 2026-02-19T08:18:08.697527Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608487702361850104:2926], ActorId: [3:7608487702361850105:2927], TraceId: ExecutionId: d9dfe33d-c4297171-9bb13306-7479930e, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=MzYyYTYyZGMtYmY0MjU2YmUtOGExMmUzZi1lMGJlOTE3Yw==, TxId: 2026-02-19T08:18:08.697562Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2379: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608487702361850104:2926], ActorId: [3:7608487702361850105:2927], TraceId: ExecutionId: d9dfe33d-c4297171-9bb13306-7479930e, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-02-19T08:18:08.697726Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608487702361850103:2925], ActorId: [3:7608487702361850104:2926], TraceId: ExecutionId: d9dfe33d-c4297171-9bb13306-7479930e, RequestDatabase: /dc-1, Got response [3:7608487702361850105:2927] SUCCESS 2026-02-19T08:18:08.697804Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2492: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7608487702361850102:2924] ActorId: [3:7608487702361850103:2925] Database: /dc-1 ExecutionId: d9dfe33d-c4297171-9bb13306-7479930e. 
Extracted script execution operation [3:7608487702361850105:2927], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7608487689476947592:2668], LeaseGeneration: 0 2026-02-19T08:18:08.697850Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2518: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7608487702361850102:2924] ActorId: [3:7608487702361850103:2925] Database: /dc-1 ExecutionId: d9dfe33d-c4297171-9bb13306-7479930e. Reply success 2026-02-19T08:18:08.698421Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=MzYyYTYyZGMtYmY0MjU2YmUtOGExMmUzZi1lMGJlOTE3Yw==, workerId: [3:7608487702361850107:2529], local sessions count: 0 2026-02-19T08:18:08.744296Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtfmn76a40y2kvs4nmachtj", Request has 18444972584620.807357s seconds to be completed 2026-02-19T08:18:08.747083Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtfmn76a40y2kvs4nmachtj", Created new session, sessionId: ydb://session/3?node_id=3&id=OTU2YTA3MGYtYjRiYzk2Yy01ZmQ1MjY2Mi0yNGM3OGM3Ng==, workerId: [3:7608487702361850136:2543], database: /dc-1, longSession: 1, local sessions count: 1 2026-02-19T08:18:08.747412Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtfmn76a40y2kvs4nmachtj 2026-02-19T08:18:08.786756Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtfmn8j2cazgpqcbwdabkfk, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTU2YTA3MGYtYjRiYzk2Yy01ZmQ1MjY2Mi0yNGM3OGM3Ng==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 44, targetId: [3:7608487702361850136:2543] 2026-02-19T08:18:08.786803Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 44 timeout: 600.000000s actor id: [3:7608487702361850139:2935] 2026-02-19T08:18:08.817644Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:08.825222Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtfmn8j2cazgpqcbwdabkfk", Forwarded response to sender actor, requestId: 44, sender: [3:7608487702361850138:2544], selfId: [3:7608487672297077555:2270], source: [3:7608487702361850136:2543] --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:18:08.845839Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. Describe result: PathErrorUnknown 2026-02-19T08:18:08.845878Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. Creating table 2026-02-19T08:18:08.845929Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-02-19T08:18:08.853957Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:08.856525Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710688 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-02-19T08:18:08.856560Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. Subscribe on create table tx: 281474976710688 2026-02-19T08:18:08.870360Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. Subscribe on tx: 281474976710688 registered 2026-02-19T08:18:08.921016Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. Request: create. Transaction completed: 281474976710688. Doublechecking... 2026-02-19T08:18:09.000874Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. Table already exists, number of columns: 3, has SecurityObject: true 2026-02-19T08:18:09.000918Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608487702361850150:2942] Owner: [3:7608487702361850149:2941]. Column diff is empty, finishing 2026-02-19T08:18:09.029327Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtfmng5aqf6kg2p5y3s5bpq", Request has 18444972584620.522320s seconds to be completed 2026-02-19T08:18:09.031846Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtfmng5aqf6kg2p5y3s5bpq", Created new session, sessionId: ydb://session/3?node_id=3&id=MjYyM2U3ZC1iMzBhMzM3Mi04Y2FkZjA4My1iMWMzNmQ4Ng==, workerId: [3:7608487706656817581:2556], database: /dc-1, longSession: 1, local sessions count: 2 2026-02-19T08:18:09.032068Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtfmng5aqf6kg2p5y3s5bpq 2026-02-19T08:18:09.058594Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608487706656817587:3049] Owner: [3:7608487706656817586:3048]. Table already exists, number of columns: 3, has SecurityObject: true 2026-02-19T08:18:09.058635Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608487706656817587:3049] Owner: [3:7608487706656817586:3048]. Column diff is empty, finishing 2026-02-19T08:18:09.058720Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7608487706656817587:3049] Owner: [3:7608487706656817586:3048]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-02-19T08:18:09.059822Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:09.060710Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608487706656817587:3049] Owner: [3:7608487706656817586:3048]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710689 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:18:09.060737Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7608487706656817587:3049] Owner: [3:7608487706656817586:3048]. Successful alter request: ExecComplete 2026-02-19T08:18:09.062253Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=MjYyM2U3ZC1iMzBhMzM3Mi04Y2FkZjA4My1iMWMzNmQ4Ng==, workerId: [3:7608487706656817581:2556], local sessions count: 1 2026-02-19T08:18:09.074557Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtfmnhj6jywqjvagj33dp9a", Request has 18444972584620.477091s seconds to be completed 2026-02-19T08:18:09.077139Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtfmnhj6jywqjvagj33dp9a", Created new session, sessionId: ydb://session/3?node_id=3&id=OWQyNjI4ZjUtYjQyZjNiNGMtZmQxMjQwZWItMjI2YjRlNjk=, workerId: [3:7608487706656817599:2560], database: /dc-1, longSession: 1, local sessions count: 2 2026-02-19T08:18:09.077356Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtfmnhj6jywqjvagj33dp9a 2026-02-19T08:18:09.114132Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=OWQyNjI4ZjUtYjQyZjNiNGMtZmQxMjQwZWItMjI2YjRlNjk=, workerId: [3:7608487706656817599:2560], local sessions count: 1 2026-02-19T08:18:09.114431Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=OTU2YTA3MGYtYjRiYzk2Yy01ZmQ1MjY2Mi0yNGM3OGM3Ng==, workerId: [3:7608487702361850136:2543], local sessions count: 0 >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] >> TBoardSubscriberTest::ReconnectReplica >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::ReconnectReplica [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:17:52.065288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:17:52.065401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:52.065472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:17:52.065514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:17:52.065557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:17:52.065608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:17:52.065644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:17:52.065667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:17:52.066878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:52.066986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:17:52.068104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:17:52.068447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:17:52.184863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:52.184944Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:52.215988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:17:52.222057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:17:52.222311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:17:52.248018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:17:52.248829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:17:52.249606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:52.249994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:17:52.265192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:52.265449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-02-19T08:17:52.266781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:17:52.266865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:52.266989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:17:52.267054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:17:52.267117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:17:52.267330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:17:52.450885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.451902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:52.452878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... # 2 2026-02-19T08:17:59.978629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-02-19T08:17:59.978702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:17:59.978846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-02-19T08:17:59.978919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-02-19T08:17:59.978969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:17:59.979020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-02-19T08:17:59.989575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:18:02.777860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0106 2026-02-19T08:18:02.777962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0247 2026-02-19T08:18:02.812078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-02-19T08:18:02.812297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-02-19T08:18:02.812369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 
2026-02-19T08:18:02.812479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-02-19T08:18:02.812520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-02-19T08:18:02.812547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:18:02.812603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-02-19T08:18:02.823694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:18:05.654242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0106 2026-02-19T08:18:05.654380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0247 2026-02-19T08:18:05.702298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-02-19T08:18:05.702509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-02-19T08:18:05.702594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:18:05.702709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-02-19T08:18:05.702760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-02-19T08:18:05.702793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:18:05.702838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-02-19T08:18:05.713893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:18:08.966847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: 
Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0038 2026-02-19T08:18:08.966988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0067 2026-02-19T08:18:09.008444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-02-19T08:18:09.008606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-02-19T08:18:09.008649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:18:09.008728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-02-19T08:18:09.008775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-02-19T08:18:09.008809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:18:09.008841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-02-19T08:18:09.019811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:18:12.897032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:18:12.897164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:18:12.897273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLEnabledTable, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409547, last: now - 60.024000s, next: now + 0.000000s, now: 1970-01-01T00:01:00.024000Z, at schemeshard: 72057594046678944 2026-02-19T08:18:12.897441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLEnabledTable, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:18:12.897515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:18:12.897757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409547, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 60024000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 40 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:18:12.898362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:18:12.899017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:12.899078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:18:12.902983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:12.903247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:18:12.903358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409547, last: now - 0.002000s, next: now + 3599.998000s, now: 1970-01-01T00:01:00.026000Z, at schemeshard: 72057594046678944 2026-02-19T08:18:12.903442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> PrivateApi::GetTask [GOOD] >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown >> PrivateApi::Nodes >> TBoardSubscriber2DCTest::ReconnectReplica >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriber2DCTest::DropByDisconnect >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> TestSqsTopicHttpProxy::TestSetQueueAttributesMultiple [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck >> TBoardSubscriber2DCTest::NotAvailableByShutdown |91.5%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2026-02-19T08:17:41.567423Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487584117372757:2144];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:41.567466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:41.649249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:17:42.086641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:42.086735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:42.132857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:42.158294Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487584117372651:2081] 1771489061562248 != 1771489061562251 2026-02-19T08:17:42.169856Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:42.171152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:42.366332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:17:42.554438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:42.558081Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:42.574669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:45.759649Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:17:45.777384Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1460: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2026-02-19T08:17:45.780921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487601297242693:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: 
{
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:45.781058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:45.781444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487601297242702:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:45.781500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:45.786810Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1336: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2026-02-19T08:17:45.787032Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:17:45.787055Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:17:45.787077Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:17:45.787271Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 2, sender: [1:7608487588412340753:2486], selfId: [1:7608487584117372916:2270], source: [1:7608487584117372916:2270] 2026-02-19T08:17:45.788324Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1460: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2026-02-19T08:17:45.788415Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1336: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2026-02-19T08:17:45.788533Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 3, sender: [1:7608487588412340753:2486], selfId: [1:7608487584117372916:2270], source: [1:7608487584117372916:2270] 2026-02-19T08:17:45.789155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487601297242704:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:45.789322Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1460: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2026-02-19T08:17:45.789351Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1336: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq } 2026-02-19T08:17:45.789443Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 4, sender: [1:7608487588412340753:2486], selfId: [1:7608487584117372916:2270], source: [1:7608487584117372916:2270] 2026-02-19T08:17:45.789486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:45.789995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487601297242706:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:45.790064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... waiting for SysViewsRoster update finished 2026-02-19T08:17:50.943656Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:17:50.956975Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:17:50.961689Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:17:50.970466Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:17:50.970557Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:51.272700Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:51.272840Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:51.299642Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489066791651 != 1771489066791655 2026-02-19T08:17:51.302607Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:51.348735Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:51.428392Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:17:51.852081Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:51.866004Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:51.867135Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:281:2328], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:17:51.867412Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:281:2328], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:17:51.869052Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:281:2328], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-02-19T08:17:51.869183Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: Resolv ... 
3306], IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [5:7608487726785803622:2375] 2026-02-19T08:18:14.318601Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [5:7608487726785803628:3393] 2026-02-19T08:18:14.318652Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=5&id=MTYwOTZlZTItY2E0YmMwOGItYzUxODU5YWQtOWVlMDk1MDU=, workerId: [5:7608487722490836218:2362], local sessions count: 1 2026-02-19T08:18:14.318941Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: Request has 18444972584615.232704s seconds to be completed 2026-02-19T08:18:14.319773Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608487726785803629:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.319869Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.320128Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608487726785803634:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.320166Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608487726785803635:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.320260Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.320724Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=5&id=MzRmNzY4YS03YzkwZTcxNy1jNzMzMGU1Yy1lNDBmNjRkZg==, workerId: [5:7608487726785803642:2383], database: /Root, longSession: 1, local sessions count: 2 2026-02-19T08:18:14.320907Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 2026-02-19T08:18:14.321550Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptProgressActor] OwnerId: [5:7608487722490836211:3306], ActorId: [5:7608487726785803624:3390], TraceId: ExecutionId: b7fa80c8-d9205975-c8b451c7-785f6e65, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=MzRmNzY4YS03YzkwZTcxNy1jNzMzMGU1Yy1lNDBmNjRkZg==, TxId: , text: -- TScriptProgressActor::OnRunQuery DECLARE $execution_id AS Text; DECLARE $database AS Text; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $execution_status AS Int32; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, execution_status = $execution_status WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2026-02-19T08:18:14.322018Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzRmNzY4YS03YzkwZTcxNy1jNzMzMGU1Yy1lNDBmNjRkZg==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 9, targetId: [5:7608487726785803642:2383] 2026-02-19T08:18:14.322040Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [5:7608487726785803657:3402] 2026-02-19T08:18:14.323446Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:14.346346Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtfmtp9css4vp08mtpdp7cb", Request has 18444972584615.205319s seconds to be completed 2026-02-19T08:18:14.348478Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtfmtp9css4vp08mtpdp7cb", Created new session, sessionId: ydb://session/3?node_id=5&id=MzBlY2ZkODQtZmNhNTcyZWQtZGMwMGE3ZjYtOWVkNTJjMg==, workerId: [5:7608487726785803705:2388], database: /Root, longSession: 1, local sessions count: 3 2026-02-19T08:18:14.348651Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtfmtp9css4vp08mtpdp7cb 2026-02-19T08:18:14.353926Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608487726785803638:2382], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2026-02-19T08:18:14.367534Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtfmtpz5gex22r61bckhbdv, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzBlY2ZkODQtZmNhNTcyZWQtZGMwMGE3ZjYtOWVkNTJjMg==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 11, targetId: [5:7608487726785803705:2388] 2026-02-19T08:18:14.367576Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [5:7608487726785803710:3446] 2026-02-19T08:18:14.437161Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7608487726785803738:3459], for# user@builtin, access# DescribeSchema 2026-02-19T08:18:14.437198Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7608487726785803738:3459], for# user@builtin, access# DescribeSchema 2026-02-19T08:18:14.439157Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608487726785803724:2395], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:18:14.441424Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=MzBlY2ZkODQtZmNhNTcyZWQtZGMwMGE3ZjYtOWVkNTJjMg==, ActorId: [5:7608487726785803705:2388], ActorState: ExecuteState, LegacyTraceId: 01khtfmtpz5gex22r61bckhbdv, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:18:14.442081Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtfmtpz5gex22r61bckhbdv", Forwarded response to sender actor, requestId: 11, sender: [5:7608487726785803709:2390], selfId: [5:7608487705310965414:2273], source: [5:7608487726785803705:2388] 2026-02-19T08:18:14.443053Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608487726785803727:3451] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:14.582265Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtfmsvxed3wg666qj610gz6", Forwarded response to sender actor, requestId: 7, sender: [5:7608487722490836211:3306], selfId: [5:7608487705310965414:2273], source: [5:7608487726785803622:2375] 2026-02-19T08:18:14.583507Z node 5 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7608487722490836211:3306], ActorId: [5:7608487726785803754:3470], TraceId: ExecutionId: b7fa80c8-d9205975-c8b451c7-785f6e65, RequestDatabase: /Root, LeaseGeneration: 1, Starting query actor #1 [5:7608487726785803755:3471] 2026-02-19T08:18:14.583555Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7608487726785803754:3470], ActorId: [5:7608487726785803755:3471], TraceId: ExecutionId: b7fa80c8-d9205975-c8b451c7-785f6e65, RequestDatabase: /Root, LeaseGeneration: 1, Bootstrap. 
Database: /Root, IsSystemUser: 1, run create session 2026-02-19T08:18:14.585070Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: Request has 18444972584614.966569s seconds to be completed 2026-02-19T08:18:14.588085Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=5&id=YjcyNzY2YmYtYWIzZTRlMjQtNmMxNTU1NTktODExZjVhYQ==, workerId: [5:7608487726785803757:2402], database: /Root, longSession: 1, local sessions count: 4 2026-02-19T08:18:14.588357Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 2026-02-19T08:18:14.589992Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7608487726785803754:3470], ActorId: [5:7608487726785803755:3471], TraceId: ExecutionId: b7fa80c8-d9205975-c8b451c7-785f6e65, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=YjcyNzY2YmYtYWIzZTRlMjQtNmMxNTU1NTktODExZjVhYQ==, TxId: , text: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2026-02-19T08:18:14.590532Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=YjcyNzY2YmYtYWIzZTRlMjQtNmMxNTU1NTktODExZjVhYQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 13, targetId: [5:7608487726785803757:2402] 2026-02-19T08:18:14.590563Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 13 timeout: 300.000000s actor id: [5:7608487726785803759:3472] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher >> TBoardSubscriberTest::ManySubscribersManyPublisher >> TestSqsTopicHttpProxy::TestGetQueueUrl >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid [GOOD] >> TestSqsTopicHttpProxy::TestSetQueueAttributesNonExistentQueue >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> KqpProxy::DatabasesCacheForServerless [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageTooBig [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] >> KqpPg::EquiJoin+useSink [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer >> KqpPg::EquiJoin-useSink >> TBoardSubscriber2DCTest::SimpleSubscriber |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] 
|91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2026-02-19T08:17:42.017492Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487588761330554:2161];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:42.017544Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:42.246551Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487587457390130:2265];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:42.429498Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:42.476336Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608487587835847014:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:42.476380Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:43.030299Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:43.234995Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:43.370481Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:43.373855Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:43.390863Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:43.386300Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:43.388387Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:43.393837Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-02-19T08:17:43.413923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:43.414013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:43.417948Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:43.446268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:43.469920Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:43.470666Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:43.485930Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:43.559686Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:43.594171Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:43.613800Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:43.622416Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:43.843129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:43.843207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:43.858953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:43.859037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:43.919972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:43.920034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:43.920177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:43.920227Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:43.920348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:43.920376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:44.065012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:44.082341Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-02-19T08:17:44.083399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:44.231150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:17:44.256700Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:44.342242Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:17:44.339065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:17:44.370024Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:17:44.379684Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:17:44.379731Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:17:44.379770Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:17:44.395419Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:17:44.411084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:44.411292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:44.411396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:44.649640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:17:45.872101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:47.021271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487588761330554:2161];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:47.021336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:47.209905Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608487587457390130:2265];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:47.209995Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:47.478000Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608487587835847014:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:47.478104Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:52.223094Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:17:52.250413Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for conf ... kie 10 2026-02-19T08:18:11.643790Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:11.709877Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-02-19T08:18:11.783103Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:18:11.783289Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:18:11.783381Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:18:11.783472Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:18:11.783577Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:18:11.783657Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:18:11.783721Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:18:11.783776Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:18:11.783840Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038890 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2026-02-19T08:18:11.796984Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038890 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:11.797091Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038890 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:11.809142Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038890 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:12.053068Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:12.055162Z node 10 :STATISTICS WARN: tx_init.cpp:292: [72075186224038891] TTxInit::Complete. EnableColumnStatistics=false 2026-02-19T08:18:12.310007Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:12.310031Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:18:12.310041Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:12.310124Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:12.382554Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:18:12.636687Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:12.637188Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:12.869779Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7608487715616203950:2917], Database: /Root/test-serverless, Start database fetching 2026-02-19T08:18:12.869981Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7608487715616203950:2917], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2026-02-19T08:18:15.615544Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7608487707048869072:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:15.615922Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:15.809514Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-02-19T08:18:15.809604Z node 11 :KQP_COMPUTE_SCHEDULER INFO: kqp_compute_scheduler_service.cpp:47: Enabled on start 2026-02-19T08:18:15.811976Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7608487728523706520:2364], Start check tables existence, number paths: 2 2026-02-19T08:18:15.812291Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-02-19T08:18:15.812316Z node 11 :KQP_WORKLOAD_SERVICE INFO: 
kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-02-19T08:18:15.812339Z node 11 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:79: Subscribed to config changes 2026-02-19T08:18:15.812350Z node 11 :KQP_COMPUTE_SCHEDULER INFO: kqp_compute_scheduler_service.cpp:86: Become enabled 2026-02-19T08:18:15.819531Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7608487728523706520:2364], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-02-19T08:18:15.819621Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7608487728523706520:2364], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-02-19T08:18:15.819665Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7608487728523706520:2364], Successfully finished 2026-02-19T08:18:15.819768Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-02-19T08:18:15.819795Z node 11 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-02-19T08:18:15.821747Z node 11 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [11:7608487728523706570:2673], domain# [OwnerId: 72057594046644480, LocalPathId: 41], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:18:15.824779Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976720657:2, at schemeshard: 72075186224037898, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:18:16.544487Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7608487711321235389:2201];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:16.544577Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:17.428545Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-02-19T08:18:17.428710Z node 10 :KQP_COMPUTE_SCHEDULER INFO: kqp_compute_scheduler_service.cpp:47: Enabled on start 2026-02-19T08:18:17.431412Z node 10 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:79: Subscribed to config changes 2026-02-19T08:18:17.431424Z node 10 :KQP_COMPUTE_SCHEDULER INFO: kqp_compute_scheduler_service.cpp:86: Become enabled 2026-02-19T08:18:17.437108Z node 10 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [10:7608487737091040594:3028], domain# [OwnerId: 72057594046644480, LocalPathId: 42], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:18:17.437623Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-02-19T08:18:17.437653Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] 
[Service] Resource pools was enabled 2026-02-19T08:18:17.437699Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7608487737091040595:2402], Start check tables existence, number paths: 2 2026-02-19T08:18:17.441617Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976730657:2, at schemeshard: 72075186224038899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:18:17.441970Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7608487737091040595:2402], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-02-19T08:18:17.442028Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7608487737091040595:2402], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-02-19T08:18:17.442070Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7608487737091040595:2402], Successfully finished 2026-02-19T08:18:17.442156Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-02-19T08:18:17.442183Z node 10 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-02-19T08:18:17.904847Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2026-02-19T08:18:17.905346Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:18:17.911208Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2026-02-19T08:18:17.911584Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:18:17.971492Z node 11 :FQ_ROW_DISPATCHER ERROR: local_leader_election.cpp:596: TLeaderElection [11:7608487728523706524:2368] Closing session with status UNAVAILABLE and issues: {
: Fatal: Link failure } 2026-02-19T08:18:17.971605Z node 11 :FQ_ROW_DISPATCHER WARN: local_leader_election.cpp:602: TLeaderElection [11:7608487728523706524:2368] Session already closed, but got status SUCCESS and issues: 2026-02-19T08:18:18.020329Z node 10 :FQ_ROW_DISPATCHER ERROR: local_leader_election.cpp:596: TLeaderElection [10:7608487737091040564:2396] Closing session with status UNAVAILABLE and issues: {
: Fatal: Link failure } 2026-02-19T08:18:18.020394Z node 10 :FQ_ROW_DISPATCHER WARN: local_leader_election.cpp:602: TLeaderElection [10:7608487737091040564:2396] Session already closed, but got status SUCCESS and issues: >> TBoardSubscriberTest::DropByDisconnect [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |91.6%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> PartitionStats::Collector [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: 2026-02-19T08:15:18.729684Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486971585727905:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:18.745779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:19.381241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:19.381352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:19.520181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:19.530900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:19.606789Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:19.609937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486971585727678:2081] 1771488918660331 != 1771488918660334 2026-02-19T08:15:19.733978Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:19.793679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:19.871210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:19.871237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:19.871243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:19.871319Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:20.694959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:20.703565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:15:20.833679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:21.093158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:21.382513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:21.526309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.729861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486971585727905:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:23.753867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:15:23.763874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486993060566050:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.763980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.764358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486993060566060:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.764398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.164076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.217655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.253929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.289440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.323725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.366261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.407843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.480543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:24.588542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486997355534230:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.588604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.588862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486997355534235:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.588906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486997355534236:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.589004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:24.593900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:24.605727Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486997355534239:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:15:24.683521Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486997355534290:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:26.463561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:09.954576Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:09.957765Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608487447098244005:2081] 1771489029845291 != 1771489029845294 2026-02-19T08:17:09.991932Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:09.992046Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:10.002555Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:10.071429Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:10.071461Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:10.071469Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:10.071589Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:10.105650Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:10.608105Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:10.620387Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:10.632592Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:10.707951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:10.879263Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:10.894555Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:10.988757Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:14.465873Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487468573082360:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:14.466002Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:14.467905Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487468573082370:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:14.467990Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:14.558245Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:14.616719Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:14.682012Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:14.731296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:14.778573Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:14.825621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:14.855926Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608487447098244050:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:14.856093Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:14.882037Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:14.958666Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:15.091671Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487472868050545:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:15.091806Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:15.092232Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487472868050550:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:15.092306Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608487472868050551:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:15.092458Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:15.099210Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:15.121331Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608487472868050554:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:17:15.201612Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608487472868050605:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:17.879092Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:24.944614Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:17:24.944646Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> VDiskBalancing::TestRandom_Block42 [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrl [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] >> VectorIndexBuildTest::CreateAndDrop >> TestSqsTopicHttpProxy::TestSetQueueAttributesNonExistentQueue [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdDlq >> TestSqsTopicHttpProxy::TestSetQueueAttributesNonExistentConsumer >> VectorIndexBuildTest::RecreatedColumns >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> IndexBuildTest::RejectsCreate >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> IndexBuildTest::CheckLimitWithDroppedIndexUniq >> IndexBuildTest::CancellationNotEnoughRetries >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false >> CrossShardUniqIndexValidationTest::Validation [GOOD] >> FulltextIndexBuildTest::Basic >> TestSqsTopicHttpProxy::TestListQueues >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 >> TestSqsTopicHttpProxy::TestSendMessageBatchLong |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |91.6%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 11332185239887435408 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2026-02-19T08:15:02.369113Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2026-02-19T08:15:02.571231Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: 
TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2026-02-19T08:15:03.900213Z 3 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:202:18] ServerId# [1:304:64] TabletId# 72057594037932033 PipeClientId# [3:202:18] 2026-02-19T08:15:03.900471Z 6 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:223:18] ServerId# [1:307:67] TabletId# 72057594037932033 PipeClientId# [6:223:18] 2026-02-19T08:15:03.900656Z 5 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7687:17] ServerId# [1:7695:1099] TabletId# 72057594037932033 PipeClientId# [5:7687:17] 2026-02-19T08:15:03.900763Z 4 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:209:18] ServerId# [1:305:65] TabletId# 72057594037932033 PipeClientId# [4:209:18] 2026-02-19T08:15:03.900868Z 2 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:195:18] ServerId# [1:303:63] TabletId# 72057594037932033 PipeClientId# [2:195:18] 2026-02-19T08:15:03.900971Z 7 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:230:18] ServerId# [1:308:68] TabletId# 72057594037932033 PipeClientId# [7:230:18] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key 
[1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Statu ... 
36 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Stop node 3 2026-02-19T08:17:29.249458Z 1 00h25m30.716628s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Stop node 4 2026-02-19T08:17:31.545137Z 1 00h25m40.745547s :PIPE_SERVER 
ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Stop node 7 2026-02-19T08:17:34.572480Z 1 00h26m10.747083s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Stop node 
1 2026-02-19T08:17:35.243922Z 1 00h26m20.751024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 1 2026-02-19T08:17:36.346782Z 1 00h26m40.753072s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Starting nodes Start compaction 1 Start checking >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder >> PrivateApi::Nodes [GOOD] >> FulltextIndexBuildTest::Basic [GOOD] >> FulltextIndexBuildTest::FlatRelevance |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |91.6%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> IndexBuildTest::CheckLimitWithDroppedIndexUniq [GOOD] >> IndexBuildTest::Lock >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsCreateUniq |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks >> VectorIndexBuildTest::Metering_Documentation_Formula [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] Test command err: RandomSeed# 12163600273266177518 2026-02-19T08:18:27.858752Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:27.860744Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 
2996095015331668815] 2026-02-19T08:18:27.878855Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:27.984547Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:27.986427Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6921182179317297620] 2026-02-19T08:18:28.007156Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:28.087099Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:28.088948Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3562364116702839713] 2026-02-19T08:18:28.100157Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:28.538287Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:28.539737Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13272777889447834665] 2026-02-19T08:18:28.549297Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:28.637045Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:28.638525Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4982428493476615409] 2026-02-19T08:18:28.660323Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:28.740138Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:28.741484Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3266220403633861054] 2026-02-19T08:18:28.761599Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:29.205914Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:29.207855Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15538185614741892966] 2026-02-19T08:18:29.218303Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 
VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:29.316911Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:29.318816Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17257396080605562340] 2026-02-19T08:18:29.382768Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2026-02-19T08:17:44.234243Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487598954228768:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:44.234562Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:17:44.430362144 283931 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:17:44.430515367 283931 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:17:44.681232Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:44.975174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:44.977739Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.045993Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19380 2026-02-19T08:17:45.159645Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.160596Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.266666Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:45.270405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:45.270510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:45.278761Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.278874Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.333410Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.445794Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.445932Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.447724Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.471435Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.471544Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.471587Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.472299Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.476528Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.497090Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.553019Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.592968Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.661236Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.739280Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.742180Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.746597Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.804831Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.804912Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.838620Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.847706Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19380: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19380 } ] 2026-02-19T08:17:45.848082Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecut ... : SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Set execution timeout 299.997725s 2026-02-19T08:18:30.014318Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\035\n\rRoot/yq/nodes\020\200\202\224\204\200\200\200\200\001\0303(\001\"\r\n\006tenant\020\001 \201 \"\r\n\007node_id\020\003 \002*\024\n\016active_workers\020\005 \004*\022\n\013data_center\020\013 \201 *\017\n\texpire_at\020\010 2*\017\n\010hostname\020\004 \201 *\022\n\013instance_id\020\002 \201 *\027\n\021interconnect_port\020\t \002*\026\n\020memory_allocated\020\007 \004*\022\n\014memory_limit\020\006 \004*\023\n\014node_address\020\n \201 *\r\n\007node_id\020\003 \002*\r\n\006tenant\020\001 \201 0\230\200\200\200\200\200@8\007@\000H\001R\022\t\377\242?&V\307\226i\0210\t\000\000\007\000\020\000X\000`\000h\004h\nh\007h\003h\002h\010h\006h\005h\th\001h\000r\022P\360\317\352\247\3073X\377\377\377\377\377\377\377\377\377\001x\000\262\001\000" } } } 2026-02-19T08:18:30.014495Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:18:30.014524Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2026-02-19T08:18:30.014617Z node 7 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:413: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2026-02-19T08:18:30.014729Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5363: TxId: 281474976710681, task: 1. 
Add data: 100 / 100 2026-02-19T08:18:30.014788Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5331: TxId: 281474976710681, task: 1. Send data=100, closed=1, bufferActorId=[7:7608487793311851263:2352] 2026-02-19T08:18:30.014807Z node 7 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 100 2026-02-19T08:18:30.014820Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2203: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-02-19T08:18:30.014840Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:18:30.014866Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2026-02-19T08:18:30.014879Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2203: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-02-19T08:18:30.014916Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:18:30.014931Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2203: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Waiting finish of sink[0] 2026-02-19T08:18:30.015025Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3024: SelfId: [7:7608487793311851263:2352], SessionActorId: [7:7608487758952110812:2352], Create new TableWriteActor for table `Root/yq/nodes` ([72057594046644480:51:1]). lockId=281474976710680. ActorId=[7:7608487793311851269:2352] 2026-02-19T08:18:30.015099Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:510: Table: `Root/yq/nodes` ([72057594046644480:51:1]), SessionActorId: [7:7608487758952110812:2352]Open: token=0 2026-02-19T08:18:30.015334Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:518: SelfId: [7:7608487793311851269:2352], Table: `Root/yq/nodes` ([72057594046644480:51:1]), SessionActorId: [7:7608487758952110812:2352]Write: token=0 2026-02-19T08:18:30.015448Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:525: SelfId: [7:7608487793311851269:2352], Table: `Root/yq/nodes` ([72057594046644480:51:1]), SessionActorId: [7:7608487758952110812:2352]Close: token=0 2026-02-19T08:18:30.015488Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5192: SelfId: [7:7608487793311851268:2352], TxId: 281474976710681, task: 1. TKqpForwardWriteActor receive EvBufferWriteResult from [7:7608487793311851263:2352] 2026-02-19T08:18:30.015502Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5233: SelfId: [7:7608487793311851268:2352], TxId: 281474976710681, task: 1. Finished 2026-02-19T08:18:30.015524Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:18:30.015549Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [7:7608487793311851267:2352], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfn9ztbd1hqaeg630w8kax. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ODU5MDdjODItOGE0Mjg0NjctZGZhODNiNDItYzQ2M2UyZGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:18:30.015683Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710681;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:18:30.016009Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3655: SelfId: [7:7608487793311851263:2352], SessionActorId: [7:7608487758952110812:2352], Start immediate commit 2026-02-19T08:18:30.016033Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1105: SelfId: [7:7608487793311851269:2352], Table: `Root/yq/nodes` ([72057594046644480:51:1]), SessionActorId: [7:7608487758952110812:2352]SetImmediateCommit 2026-02-19T08:18:30.016048Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3551: SelfId: [7:7608487793311851263:2352], SessionActorId: [7:7608487758952110812:2352], Flush data 2026-02-19T08:18:30.016196Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1265: SelfId: [7:7608487793311851269:2352], Table: `Root/yq/nodes` ([72057594046644480:51:1]), SessionActorId: [7:7608487758952110812:2352]Send EvWrite to ShardID=72075186224037899, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710680 DataShard: 72075186224037899 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 51, Size=212, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3, BufferMemory=212 2026-02-19T08:18:30.022768Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:769: SelfId: [7:7608487793311851269:2352], Table: `Root/yq/nodes` ([72057594046644480:51:1]), SessionActorId: [7:7608487758952110812:2352]Recv EvWriteResult from ShardID=72075186224037899, Status=STATUS_COMPLETED, TxId=5, Locks= , Cookie=1 2026-02-19T08:18:30.022799Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1049: SelfId: [7:7608487793311851269:2352], Table: `Root/yq/nodes` ([72057594046644480:51:1]), SessionActorId: [7:7608487758952110812:2352]Got completed result TxId=5, TabletId=72075186224037899, Cookie=1, Mode=3, Locks= 2026-02-19T08:18:30.022849Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4680: SelfId: [7:7608487793311851263:2352], SessionActorId: [7:7608487758952110812:2352], Committed TxId=0 2026-02-19T08:18:30.074432Z node 7 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:113: Call create directory "Root/yq" 2026-02-19T08:18:30.074457Z node 7 :YQ_CONTROL_PLANE_STORAGE DEBUG: schema.cpp:383: Call create directory "Root/yq" 2026-02-19T08:18:30.189897Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29845: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29845 } ] |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> TestSqsTopicHttpProxy::TestSetQueueAttributesNonExistentConsumer [GOOD] |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdDlq [GOOD] >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted >> TestSqsTopicHttpProxy::TestSetQueueAttributesUnknownAttribute >> FulltextIndexBuildTest::FlatRelevance [GOOD] >> FulltextIndexBuildTest::DropTableWithFlatRelevance >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdDlq3Consumers >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> IndexBuildTest::ShadowDataNotAllowedByDefault >> VectorIndexBuildTest::CreateAndDrop [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false >> IndexBuildTest::RejectsCreateUniq [GOOD] >> IndexBuildTest::RejectsDropIndex >> IndexBuildTest::Metering_Documentation_Formula [GOOD] >> IndexBuildTest::LockUniq >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> TestSqsTopicHttpProxy::TestListQueues [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> IndexBuildTest::IndexPartitioningIsPersistedUniq >> VectorIndexBuildTest::RecreatedColumns [GOOD] >> VectorIndexBuildTest::PrefixedDuplicates >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty >> TestSqsTopicHttpProxy::TestSetQueueAttributesBasic >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases >> FulltextIndexBuildTest::DropTableWithFlatRelevance [GOOD] >> FulltextIndexBuildTest::FlatRelevanceLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 >> IndexBuildTest::RejectsDropIndex [GOOD] >> IndexBuildTest::RejectsDropIndexUniq >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> CompressExecutor::TestReorderedExecutor >> DDisk::PersistentBufferFreeSpace [GOOD] >> IndexBuildTest::LockUniq [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true >> KqpPg::ExplainColumnsReorder [GOOD] >> IndexBuildTest::IndexPartitioningIsPersistedUniq [GOOD] >> IndexBuildTest::DropIndex ------- [TM] {asan, default-linux-x86_64, 
release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2026-02-19T08:18:02.957113Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.957173Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.957204Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:02.957626Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:02.969419Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:02.969571Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.969892Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:02.970393Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.970537Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:02.970641Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:02.970693Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-02-19T08:18:02.971422Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.971469Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.971517Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:02.971814Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:02.972318Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:02.972446Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.972614Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:02.973012Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.973114Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:02.973208Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:02.973252Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-02-19T08:18:02.974239Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.974266Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.974300Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:02.974589Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-02-19T08:18:02.975076Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:02.975225Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.975473Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:02.976196Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.976380Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:02.976468Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:02.976521Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-02-19T08:18:02.977411Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.977431Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.977462Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:02.977758Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:02.978292Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:02.978415Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.978585Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:02.980077Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.980577Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:02.981685Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:02.981763Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-02-19T08:18:02.983312Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.983334Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.983358Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:02.983703Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:02.988880Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:02.989055Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.989364Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:02.989881Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.990077Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:02.990171Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-02-19T08:18:02.990212Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-02-19T08:18:02.990934Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.990959Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.990978Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:02.991309Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:02.991926Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:02.992050Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.992248Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:02.992634Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.992745Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:02.992833Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:02.992878Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-02-19T08:18:02.993802Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.993831Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.993867Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:02.994188Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:02.994750Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:02.994867Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.995066Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:02.995748Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.995960Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:02.996054Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:02.996111Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-02-19T08:18:02.997006Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.997065Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.997093Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:02.997372Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-02-19T08:18:02.997890Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:02.998014Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:02.998255Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:02.999696Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:03.000107Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:03.000190Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:03.000222Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-02-19T08:18:03.028234Z :ReadSession INFO: Random seed for debugging is 1771489083028198 2026-02-19T08:18:03.619748Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487679227409463:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:03.630402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:18:03.711377Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:18:03.735656Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487678651582113:2216];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:03.735722Z node 2 :M ... 
ode 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.569666Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:33.569687Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.569699Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:33.670280Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:33.670304Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.670327Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:33.670344Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.670356Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:33.773643Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:33.773666Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.773675Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:33.773686Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.773696Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:33.876231Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:33.876268Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.876280Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:33.876298Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.876310Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:33.976168Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:33.976204Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:33.976216Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:33.976231Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-02-19T08:18:33.976241Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:34.077812Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:34.077848Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:34.077860Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:34.077876Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:34.077886Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:34.130353Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1338: session cookie 1 consumer shared/user session shared/user_1_1_16667788187262287875_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2026-02-19T08:18:34.181811Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:34.181845Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:34.181856Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:34.181869Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:34.181878Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:34.213788Z :INFO: [/Root] [/Root] [972ec663-57fe76d6-1811c0e4-381319cb] Closing read session. Close timeout: 0.000000s 2026-02-19T08:18:34.213859Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2026-02-19T08:18:34.213899Z :INFO: [/Root] [/Root] [972ec663-57fe76d6-1811c0e4-381319cb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16571 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:18:34.214030Z :NOTICE: [/Root] [/Root] [972ec663-57fe76d6-1811c0e4-381319cb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:18:34.214072Z :DEBUG: [/Root] [/Root] [972ec663-57fe76d6-1811c0e4-381319cb] [dc1] Abort session to cluster 2026-02-19T08:18:34.214605Z :NOTICE: [/Root] [/Root] [972ec663-57fe76d6-1811c0e4-381319cb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:18:34.217970Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_16667788187262287875_v1 grpc read done: success# 0, data# { } 2026-02-19T08:18:34.222355Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_16667788187262287875_v1 grpc read failed 2026-02-19T08:18:34.225797Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_16667788187262287875_v1 2026-02-19T08:18:34.222417Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_16667788187262287875_v1 grpc closed 2026-02-19T08:18:34.237830Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037892] server disconnected, pipe [1:7608487739356954054:2484] destroyed 2026-02-19T08:18:34.237924Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_16667788187262287875_v1 2026-02-19T08:18:34.222452Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_16667788187262287875_v1 is DEAD 2026-02-19T08:18:34.225472Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7608487739356954051:2481] disconnected. 2026-02-19T08:18:34.225495Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7608487739356954051:2481] disconnected; active server actors: 1 2026-02-19T08:18:34.225517Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7608487739356954051:2481] client user disconnected session shared/user_1_1_16667788187262287875_v1 2026-02-19T08:18:34.282584Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:34.282612Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:34.282623Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:34.282638Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:34.282648Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:34.385816Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:34.385853Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:34.385864Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:34.385879Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:34.385897Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:18:35.837623Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, 
ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:35.837665Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:35.837736Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:18:35.838091Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:18:35.838828Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:18:35.839030Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:18:35.839503Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:18:35.840290Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:18:35.840768Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:18:35.841081Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-02-19T08:18:35.841162Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:18:35.841220Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:18:35.841266Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-02-19T08:18:35.841430Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-02-19T08:18:35.841476Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> IndexBuildTest::WithFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> DDisk::PersistentBufferFreeSpace [GOOD] Test command err: RandomSeed# 12854836685190942611 next iteration write persistent buffer offset# 40960 size# 8192 lsn# 1 letter# . 
erase persistent buffer offset# 40960 size# 8192 lsn# 1 erase persistent buffer offset# 40960 size# 8192 lsn# 1 write offset# 40960 size# 12288 letter# / write offset# 40960 size# 4096 letter# 0 list write offset# 20480 size# 12288 letter# 1 write offset# 20480 size# 8192 letter# 2 write persistent buffer offset# 4096 size# 4096 lsn# 2 letter# 3 list sync persistent buffer offset# 4096 size# 4096 lsn# 2 write persistent buffer offset# 45056 size# 8192 lsn# 3 letter# 4 erase persistent buffer offset# 45056 size# 8192 lsn# 3 erase persistent buffer offset# 45056 size# 8192 lsn# 3 write persistent buffer offset# 45056 size# 4096 lsn# 4 letter# 5 write persistent buffer offset# 49152 size# 4096 lsn# 5 letter# 6 erase persistent buffer offset# 49152 size# 4096 lsn# 5 erase persistent buffer offset# 49152 size# 4096 lsn# 5 read offset# 20480 size# 12288 read persistent buffer offset# 45056 size# 4096 lsn# 4 read persistent buffer offset# 45056 size# 4096 lsn# 4 write offset# 0 size# 8192 letter# 7 write offset# 61440 size# 4096 letter# 8 write persistent buffer offset# 4096 size# 8192 lsn# 6 letter# 9 read persistent buffer offset# 4096 size# 8192 lsn# 6 write persistent buffer offset# 20480 size# 8192 lsn# 7 letter# A write offset# 4096 size# 8192 letter# B erase persistent buffer offset# 4096 size# 8192 lsn# 6 erase persistent buffer offset# 4096 size# 8192 lsn# 6 erase persistent buffer offset# 20480 size# 8192 lsn# 7 erase persistent buffer offset# 20480 size# 8192 lsn# 7 read persistent buffer offset# 45056 size# 4096 lsn# 4 read persistent buffer offset# 45056 size# 4096 lsn# 4 read persistent buffer offset# 45056 size# 4096 lsn# 4 sync persistent buffer offset# 45056 size# 4096 lsn# 4 list read offset# 28672 size# 8192 read offset# 0 size# 4096 write offset# 12288 size# 12288 letter# C list write offset# 4096 size# 8192 letter# D write persistent buffer offset# 12288 size# 4096 lsn# 8 letter# E read persistent buffer offset# 12288 size# 4096 lsn# 8 erase persistent buffer offset# 12288 size# 4096 lsn# 8 erase persistent buffer offset# 12288 size# 4096 lsn# 8 write persistent buffer offset# 53248 size# 12288 lsn# 9 letter# F read persistent buffer offset# 53248 size# 12288 lsn# 9 sync persistent buffer offset# 53248 size# 12288 lsn# 9 write offset# 45056 size# 8192 letter# G write offset# 45056 size# 12288 letter# H list write offset# 36864 size# 4096 letter# I read offset# 12288 size# 4096 write persistent buffer offset# 16384 size# 12288 lsn# 10 letter# J erase persistent buffer offset# 16384 size# 12288 lsn# 10 erase persistent buffer offset# 16384 size# 12288 lsn# 10 list write offset# 53248 size# 8192 letter# K write persistent buffer offset# 45056 size# 12288 lsn# 11 letter# L write offset# 0 size# 4096 letter# M read offset# 12288 size# 12288 erase persistent buffer offset# 45056 size# 12288 lsn# 11 erase persistent buffer offset# 45056 size# 12288 lsn# 11 write offset# 4096 size# 8192 letter# N read offset# 53248 size# 8192 write offset# 49152 size# 4096 letter# O list list write persistent buffer offset# 20480 size# 8192 lsn# 12 letter# P sync persistent buffer offset# 20480 size# 8192 lsn# 12 write offset# 57344 size# 4096 letter# Q write offset# 57344 size# 8192 letter# R list write persistent buffer offset# 40960 size# 12288 lsn# 13 letter# S sync persistent buffer offset# 40960 size# 12288 lsn# 13 list read offset# 36864 size# 4096 write persistent buffer offset# 0 size# 4096 lsn# 14 letter# T read offset# 36864 size# 4096 erase persistent buffer offset# 0 size# 4096 lsn# 14 
erase persistent buffer offset# 0 size# 4096 lsn# 14 write persistent buffer offset# 28672 size# 8192 lsn# 15 letter# U sync persistent buffer offset# 28672 size# 8192 lsn# 15 write persistent buffer offset# 4096 size# 4096 lsn# 16 letter# V read offset# 45056 size# 12288 erase persistent buffer offset# 4096 size# 4096 lsn# 16 erase persistent buffer offset# 4096 size# 4096 lsn# 16 read offset# 61440 size# 4096 list read offset# 53248 size# 4096 write offset# 36864 size# 4096 letter# W list write persistent buffer offset# 57344 size# 8192 lsn# 17 letter# X sync persistent buffer offset# 57344 size# 8192 lsn# 17 list write persistent buffer offset# 20480 size# 8192 lsn# 18 letter# Y erase persistent buffer offset# 20480 size# 8192 lsn# 18 erase persistent buffer offset# 20480 size# 8192 lsn# 18 read offset# 16384 size# 12288 list write persistent buffer offset# 49152 size# 12288 lsn# 19 letter# Z read offset# 32768 size# 8192 erase persistent buffer offset# 49152 size# 12288 lsn# 19 erase persistent buffer offset# 49152 size# 12288 lsn# 19 write offset# 36864 size# 8192 letter# a read offset# 0 size# 4096 write offset# 4096 size# 4096 letter# b list list write persistent buffer offset# 0 size# 12288 lsn# 20 letter# c erase persistent buffer offset# 0 size# 12288 lsn# 20 erase persistent buffer offset# 0 size# 12288 lsn# 20 read offset# 61440 size# 4096 write persistent buffer offset# 40960 size# 8192 lsn# 21 letter# d erase persistent buffer offset# 40960 size# 8192 lsn# 21 erase persistent buffer offset# 40960 size# 8192 lsn# 21 write offset# 53248 size# 4096 letter# e read offset# 32768 size# 12288 write offset# 32768 size# 12288 letter# f write offset# 20480 size# 12288 letter# g write persistent buffer offset# 45056 size# 4096 lsn# 22 letter# h list read offset# 36864 size# 12288 read offset# 57344 size# 8192 erase persistent buffer offset# 45056 size# 4096 lsn# 22 erase persistent buffer offset# 45056 size# 4096 lsn# 22 list list read offset# 20480 size# 4096 write persistent buffer offset# 32768 size# 4096 lsn# 23 letter# i read persistent buffer offset# 32768 size# 4096 lsn# 23 read offset# 57344 size# 4096 list read persistent buffer offset# 32768 size# 4096 lsn# 23 sync persistent buffer offset# 32768 size# 4096 lsn# 23 read offset# 28672 size# 12288 list write persistent buffer offset# 12288 size# 4096 lsn# 24 letter# j write persistent buffer offset# 24576 size# 8192 lsn# 25 letter# k write offset# 24576 size# 12288 letter# l write offset# 20480 size# 8192 letter# m read offset# 16384 size# 12288 erase persistent buffer offset# 24576 size# 8192 lsn# 25 erase persistent buffer offset# 24576 size# 8192 lsn# 25 list erase persistent buffer offset# 12288 size# 4096 lsn# 24 erase persistent buffer offset# 12288 size# 4096 lsn# 24 write persistent buffer offset# 12288 size# 4096 lsn# 26 letter# n sync persistent buffer offset# 12288 size# 4096 lsn# 26 write persistent buffer offset# 40960 size# 4096 lsn# 27 letter# o read persistent buffer offset# 40960 size# 4096 lsn# 27 sync persistent buffer offset# 40960 size# 4096 lsn# 27 list write offset# 32768 size# 8192 letter# p list list list read offset# 28672 size# 12288 write persistent buffer offset# 12288 size# 4096 lsn# 28 letter# q sync persistent buffer offset# 12288 size# 4096 lsn# 28 write offset# 32768 size# 12288 letter# r read offset# 8192 size# 12288 list read offset# 49152 size# 4096 write offset# 4096 size# 4096 letter# s write persistent buffer offset# 0 size# 4096 lsn# 29 letter# t read persistent buffer offset# 0 size# 
4096 lsn# 29 list list list write offset# 57344 size# 4096 letter# u sync persistent buffer offset# 0 size# 4096 lsn# 29 list write persistent buffer offset# 0 size# 8192 lsn# 30 letter# v read offset# 49152 size# 4096 list sync persistent buffer offset# 0 size# 8192 lsn# 30 read offset# 36864 size# 4096 read offset# 8192 size# 8192 write persistent buffer offset# 0 size# 4096 lsn# 31 letter# w erase persistent buffer offset# 0 size# 4096 lsn# 31 erase persistent buffer offset# 0 size# 4096 lsn# 31 list list write offset# 4096 size# 4096 letter# x write offset# 53248 size# 8192 letter# y list write persistent buffer offset# 4096 size# 8192 lsn# 32 letter# z list read persistent buffer offset# 4096 size# 8192 lsn# 32 erase persistent buffer offset# 4096 size# 8192 lsn# 32 erase persistent buffer offset# 4096 size# 8192 lsn# 32 write persistent buffer offset# 36864 size# 8192 lsn# 33 letter# . write offset# 20480 size# 8192 letter# / list sync persistent buffer offset# 36864 size# 8192 lsn# 33 list write persistent buffer offset# 20480 size# 4096 lsn# 34 letter# 0 read persistent buffer offset# 20480 size# 4096 lsn# 34 erase persistent buffer offset# 20480 size# 4096 lsn# 34 erase persistent buffer offset# 20480 size# 4096 lsn# 34 write persistent buffer offset# 49152 size# 12288 lsn# 35 letter# 1 sync persistent buffer offset# 49152 size# 12288 lsn# 35 write offset# 4096 size# 12288 letter# 2 write persistent buffer offset# 16384 size# 4096 lsn# 36 letter# 3 erase persistent buffer offset# 16384 size# 4096 lsn# 36 erase persistent buffer offset# 16384 size# 4096 lsn# 36 list read offset# 36864 size# 8192 list write offset# 28672 size# 8192 letter# 4 read offset# 57344 size# 8192 write persistent buffer offset# 45056 size# 12288 lsn# 37 letter# 5 erase persistent buffer offset# 45056 size# 12288 lsn# 37 erase persistent buffer offset# 45056 size# 12288 lsn# 37 write persistent buffer offset# 0 size# 4096 lsn# 38 letter# 6 list write offset# 45056 size# 12288 letter# 7 read persistent buffer offset# 0 size# 4096 lsn# 38 list read persistent buffer offset# 0 size# 4096 lsn# 38 list erase persistent buffer offset# 0 size# 4096 lsn# 38 erase persistent buffer offset# 0 size# 4096 lsn# 38 read offset# 49152 size# 12288 read offset# 12288 size# 12288 read offset# 16384 size# 4096 write persistent buffer offset# 16384 size# 8192 lsn# 39 letter# 8 read persistent buffer offset# 16384 size# 8192 lsn# 39 write offset# 53248 size# 12288 letter# 9 read persistent buffer offset# 16384 size# 8192 lsn# 39 sync persistent buffer offset# 16384 size# 8192 lsn# 39 write offset# 4096 size# 4096 letter# A write offset# 53248 size# 8192 letter# B read offset# 40960 size# 12288 read offset# 20480 size# 8192 write persistent buffer offset# 28672 size# 8192 lsn# 40 letter# C write offset# 16384 size# 8192 letter# D erase persistent buffer offset# 28672 size# 8192 lsn# 40 erase persistent buffer offset# 28672 size# 8192 lsn# 40 list list write persistent buffer offset# 32768 size# 8192 lsn# 41 letter# E sync persistent buffer offset# 32768 size# 8192 lsn# 41 read offset# 32768 size# 8192 read offset# 24576 size# 8192 read offset# 57344 size# 8192 write offset# 45056 size# 12288 letter# F read offset# 45056 size# 4096 write offset# 36864 size# 8192 letter# G write persistent buffer offset# 53248 size# 8192 lsn# 42 letter# H read persistent buffer offset# 53248 size# 8192 lsn# 42 read persistent buffer offset# 53248 size# 8192 lsn# 42 write persistent buffer offset# 61440 size# 4096 lsn# 43 letter# I read persistent 
buffer offset# 53248 size# 8192 lsn# 42 read offset# 57344 size# 4096 sync persistent buffer offset# 53248 size# 8192 lsn# 42 sync persistent buffer offset# 61440 size# 4096 lsn# 43 list read offset# 20480 size# 8192 read offset# 40960 size# 4096 list read offset# 20480 size# 12288 list write persistent buffer offset# 36864 size# 8192 lsn# 44 letter# J write offset# 8192 size# 8192 letter# K read persistent buffer offset# 36864 size# 8192 lsn# 44 sync persistent buffer offset# 36864 size# 8192 lsn# 44 write persistent buffer offset# 61440 size# 4096 lsn# 45 letter# L sync persistent buffer offset# 61440 size# 4096 lsn# 45 read offset# 24576 size# 12288 read offset# 45056 size# 8192 read offset# 28672 size# 8192 write persistent buffer offset# 12288 size# 12288 lsn# 46 letter# M write persistent buffer offset# 36864 size# 12288 lsn# 47 letter# N read persistent buffer offset# 36864 size# 12288 lsn# 47 erase persistent buffer offset# 12288 size# 12288 lsn# 46 erase persistent buffer offset# 12288 size# 12288 lsn# 46 read persistent buffer offset# 36864 size# 12288 lsn# 47 sync persistent buffer offset# 36864 size# 12288 lsn# 47 write offset# 36864 size# 4096 letter# O write persistent buffer offset# 57344 size# 4 ... 94382858 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 freeSpace: 0.9994628429 erase persistent buffer offset# 0 size# 294912 lsn# 260 erase persistent buffer offset# 0 size# 294912 lsn# 260 freeSpace: 0.9994802475 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 freeSpace: 0.9994885921 erase persistent buffer offset# 0 size# 278528 lsn# 332 erase persistent buffer offset# 0 size# 278528 lsn# 332 freeSpace: 0.999505043 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 freeSpace: 0.9995098114 erase persistent buffer offset# 0 size# 61440 lsn# 28 erase persistent buffer offset# 0 size# 61440 lsn# 28 freeSpace: 0.9995136261 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 freeSpace: 0.9995298386 erase persistent buffer offset# 0 size# 12288 lsn# 914 erase persistent buffer offset# 0 size# 12288 lsn# 914 freeSpace: 0.9995307922 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 freeSpace: 0.9995532036 erase persistent buffer offset# 0 size# 24576 lsn# 971 erase persistent buffer offset# 0 size# 24576 lsn# 971 freeSpace: 0.9995548725 batch erase persistent buffer count# 4 batch erase persistent buffer count# 4 freeSpace: 0.9996147156 erase persistent buffer offset# 0 size# 36864 lsn# 210 erase persistent buffer offset# 0 size# 36864 lsn# 210 freeSpace: 0.9996170998 batch erase persistent buffer count# 4 batch erase persistent buffer count# 4 freeSpace: 0.9996643066 erase persistent buffer offset# 0 size# 86016 lsn# 545 erase persistent buffer offset# 0 size# 86016 lsn# 545 freeSpace: 0.9996695518 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 freeSpace: 0.999704361 erase persistent buffer offset# 0 size# 495616 lsn# 512 erase persistent buffer offset# 0 size# 495616 lsn# 512 freeSpace: 0.999733448 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 freeSpace: 0.9997758865 erase persistent buffer offset# 0 size# 217088 lsn# 129 erase persistent buffer offset# 0 size# 217088 lsn# 129 freeSpace: 0.9997887611 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 freeSpace: 0.9998171329 erase persistent buffer offset# 0 size# 131072 lsn# 293 erase persistent buffer 
offset# 0 size# 131072 lsn# 293 freeSpace: 0.9998250008 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 freeSpace: 0.9998526573 erase persistent buffer offset# 0 size# 131072 lsn# 399 erase persistent buffer offset# 0 size# 131072 lsn# 399 freeSpace: 0.9998605251 batch erase persistent buffer count# 3 batch erase persistent buffer count# 3 freeSpace: 0.999915123 erase persistent buffer offset# 0 size# 258048 lsn# 929 erase persistent buffer offset# 0 size# 258048 lsn# 929 freeSpace: 0.9999303818 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 freeSpace: 0.9999754429 erase persistent buffer offset# 0 size# 188416 lsn# 389 erase persistent buffer offset# 0 size# 188416 lsn# 389 freeSpace: 0.9999866486 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 freeSpace: 0.999994278 erase persistent buffer offset# 0 size# 94208 lsn# 482 erase persistent buffer offset# 0 size# 94208 lsn# 482 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 Restarting node 1 2026-02-19T08:18:28.219332Z 8 00h08m30.008168s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:33510:3985] ServerId# [1:33850:143] TabletId# 72057594037932033 PipeClientId# [8:33510:3985] 2026-02-19T08:18:28.219490Z 7 00h08m30.008168s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:33511:3986] ServerId# [1:33851:144] TabletId# 72057594037932033 PipeClientId# [7:33511:3986] 2026-02-19T08:18:28.219650Z 6 00h08m30.008168s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:33512:3986] ServerId# [1:33852:145] TabletId# 72057594037932033 PipeClientId# [6:33512:3986] 2026-02-19T08:18:28.219792Z 5 00h08m30.008168s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:33513:3982] ServerId# [1:33853:146] TabletId# 72057594037932033 PipeClientId# [5:33513:3982] 2026-02-19T08:18:28.219879Z 4 00h08m30.008168s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:33514:3982] ServerId# [1:33854:147] 
TabletId# 72057594037932033 PipeClientId# [4:33514:3982] 2026-02-19T08:18:28.220001Z 3 00h08m30.008168s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:33515:3982] ServerId# [1:33855:148] TabletId# 72057594037932033 PipeClientId# [3:33515:3982] 2026-02-19T08:18:28.220120Z 2 00h08m30.008168s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:33516:3983] ServerId# [1:33856:149] TabletId# 72057594037932033 PipeClientId# [2:33516:3983] freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 Restarting node 1 2026-02-19T08:18:33.974132Z 8 00h09m30.008680s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:37845:4502] ServerId# [1:38185:143] TabletId# 72057594037932033 PipeClientId# [8:37845:4502] 2026-02-19T08:18:33.974417Z 7 00h09m30.008680s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# 
[7:37846:4503] ServerId# [1:38186:144] TabletId# 72057594037932033 PipeClientId# [7:37846:4503] 2026-02-19T08:18:33.974582Z 6 00h09m30.008680s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:37847:4504] ServerId# [1:38187:145] TabletId# 72057594037932033 PipeClientId# [6:37847:4504] 2026-02-19T08:18:33.974734Z 5 00h09m30.008680s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:37848:4499] ServerId# [1:38188:146] TabletId# 72057594037932033 PipeClientId# [5:37848:4499] 2026-02-19T08:18:33.974856Z 4 00h09m30.008680s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:37849:4500] ServerId# [1:38189:147] TabletId# 72057594037932033 PipeClientId# [4:37849:4500] 2026-02-19T08:18:33.975029Z 3 00h09m30.008680s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:37850:4499] ServerId# [1:38190:148] TabletId# 72057594037932033 PipeClientId# [3:37850:4499] 2026-02-19T08:18:33.975248Z 2 00h09m30.008680s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:37851:4500] ServerId# [1:38191:149] TabletId# 72057594037932033 PipeClientId# [2:37851:4500] freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 freeSpace: 1 >> IndexBuildTest::RejectsDropIndexUniq [GOOD] >> IndexBuildTest::RejectsCancelUniq >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> FulltextIndexBuildTest::FlatRelevanceLimit [GOOD] >> IndexBuildTest::BaseCase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: 2026-02-19T08:17:08.932860Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487444370909982:2245];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:08.932920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:08.956691Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:17:09.215033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:09.215167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:09.218681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:09.256078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:09.303666Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:09.318626Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487444370909762:2081] 1771489028906321 != 1771489028906324 2026-02-19T08:17:09.360578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:09.360607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:09.360616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:09.360731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:09.505636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:09.784204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:09.933232Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:11.922045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487457255812552:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:11.924008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487457255812546:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:11.924179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:11.924766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487457255812561:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:11.924807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:11.928156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:11.940758Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487457255812560:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:17:12.031946Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487461550779911:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:12.886503Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487460548837582:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:12.887099Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:12.901839Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:12.970299Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:12.971547Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608487460548837522:2081] 1771489032879800 != 1771489032879803 2026-02-19T08:17:13.002767Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:13.002850Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:13.006217Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:13.068146Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:13.068168Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:13.068175Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:13.068268Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:13.181292Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:13.517578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:13.897977Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:16.580088Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487477728707593:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.580242Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.580787Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487477728707606:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.580792Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487477728707605:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.580863Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.585365Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:16.604565Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487477728707609:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:17:16.677472Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487477728707660:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:17.670536Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608487481170733872:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:17.670959Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:17.684792Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:17.760302Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:17.797106Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968 ... meshard: 72057594046644480 2026-02-19T08:18:27.397701Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7608487760876281182:2141];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:27.397855Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:28.406701Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608487786646085762:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.406860Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.407320Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608487786646085772:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.407433Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.434756Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:28.513802Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:28.592009Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608487786646085939:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.592155Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.592475Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608487786646085944:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.592535Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608487786646085945:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.592707Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:28.597749Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:28.611107Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7608487786646085948:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-02-19T08:18:28.715747Z node 11 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [11:7608487786646085999:2651] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-02-19T08:18:37.483283Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:18:37.491235Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:18:37.496900Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:301:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:18:37.497327Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:18:37.497532Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:18:37.882344Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:37.882540Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:37.922288Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:34:2081] 1771489112190021 != 1771489112190024 2026-02-19T08:18:37.927151Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:37.984278Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:38.071297Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:18:38.425083Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:38.442636Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:38.576867Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:870:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:38.577045Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:881:2757], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:38.577180Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:38.578787Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:886:2762], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:38.579344Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:38.591281Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:38.719419Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:884:2760], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-02-19T08:18:38.747949Z node 12 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [12:941:2798] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "8ce6e47b-8dd3c4cb-9e99c97a-769684a9" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'219) \'(\'\"_id\" \'\"a2b892e0-ab7f9304-4335fcd7-a93200e8\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true >> IndexBuildTest::DropIndex [GOOD] >> IndexBuildTest::DropIndexUniq >> TestSqsTopicHttpProxy::TestSetQueueAttributesUnknownAttribute [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> KqpPg::PgUpdate-useSink [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdDlq3Consumers [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> IndexBuildTest::WithFollowers [GOOD] >> IndexBuildTest::WithFollowersUniq >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> PartitionStats::CollectorOverload [GOOD] >> TestSqsTopicHttpProxy::TestSetQueueAttributesRedrivePolicy >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdDlq0Consumers |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: 2026-02-19T08:17:45.044731Z node 1 :TABLET_RESOLVER INFO: 
tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-02-19T08:17:45.205378Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:45.205500Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:45.205573Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:45.205648Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-02-19T08:17:45.242904Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:45.324595Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-02-19T08:17:45.329838Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-02-19T08:17:45.332851Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-02-19T08:17:45.339213Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-02-19T08:17:45.341315Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-02-19T08:17:45.372932Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:45.373407Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a6d5038-2cbcf7b-a553518a-e4e6ff98_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:45.425569Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer 
error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:45.426176Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8c76f951-991c29d5-55519d03-770e230_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:45.601402Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:45.601966Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|46ca4517-6a5fe2f8-347a6b97-36764a1a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:245:2057] recipient: [1:104:2137] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:248:2057] recipient: [1:247:2242] Leader for TabletID 72057594037927937 is [1:249:2243] sender: [1:250:2057] recipient: [1:247:2242] 2026-02-19T08:17:45.697302Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:45.697409Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:45.698455Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:45.698513Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:17:45.699259Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:298:2243] 2026-02-19T08:17:45.701682Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:299:2243] 2026-02-19T08:17:45.715087Z node 1 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:17:45.715478Z node 1 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:17:45.715844Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:298:2243] 2026-02-19T08:17:45.716053Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:299:2243] 2026-02-19T08:17:45.732971Z node 1 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-02-19T08:17:45.735914Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037927937 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [1:249:2243] sender: [1:327:2057] recipient: [1:14:2061] Got start offset = 0 2026-02-19T08:17:46.573774Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-02-19T08:17:46.780999Z node 2 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:46.781074Z node 2 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:46.781128Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:46.781210Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:183:2057] recipient: [2:14:2061] 2026-02-19T08:17:46.802464Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:46.803402Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 2 actor [2:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-02-19T08:17:46.804133Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:189:2142] 2026-02-19T08:17:46.807035Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:189:2142] 2026-02-19T08:17:46.808920Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:190:2142] 2026-02-19T08:17:46.811011Z node 2 :PERSQUEUE INFO: partition.cpp:713: 
[72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:190:2142] 2026-02-19T08:17:46.818504Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:46.818965Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|302ca8fe-2e6aba72-6aa2a36d-b2048415_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:46.842976Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:46.843571Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2fbb209f-3630bb5c-5bc6bcef-be15d844_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:46.887245Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:46.887777Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d3cc74df-b3a61c2d-498ae2bb-58637e4f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:111:2142]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:243:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:246:2057] recipient: [2:245:2241] Leader for TabletID 72057594037927937 is [2:247:2242] sender: [2:248:2057] recipient: [2:245:2241] 2026-02-19T08:17:47.019233Z node 2 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:47.019324Z node 2 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, E ... 
: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 } 2026-02-19T08:18:41.881036Z node 55 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:189:2142] 2026-02-19T08:18:41.889086Z node 55 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:189:2142] 2026-02-19T08:18:41.896087Z node 55 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:190:2142] 2026-02-19T08:18:41.898849Z node 55 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:190:2142] 2026-02-19T08:18:41.908760Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:41.909203Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7beb3bf4-2f5dbf56-1832579d-b15e8529_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:18:41.929132Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:41.929666Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|af082925-5be503e7-bb8c258e-3ce00727_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:18:41.975484Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:41.975961Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8313cd44-da834808-31aae015-35413a4d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [55:111:2142] sender: [55:244:2057] recipient: [55:103:2137] Leader for TabletID 72057594037927937 is [55:111:2142] sender: [55:247:2057] recipient: [55:246:2242] Leader for TabletID 72057594037927937 is [55:248:2243] sender: [55:249:2057] recipient: [55:246:2242] 2026-02-19T08:18:42.071629Z node 55 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:18:42.071694Z node 55 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:18:42.072689Z node 55 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:42.072750Z node 55 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:18:42.073965Z node 55 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:297:2243] 2026-02-19T08:18:42.076994Z node 55 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:298:2243] 2026-02-19T08:18:42.089676Z node 55 :PERSQUEUE INFO: partition_init.cpp:1037: 
[rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:18:42.090362Z node 55 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:18:42.091384Z node 55 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:297:2243] 2026-02-19T08:18:42.091652Z node 55 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:298:2243] 2026-02-19T08:18:42.105101Z node 55 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-02-19T08:18:42.109034Z node 55 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 55 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037927937 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [55:248:2243] sender: [55:326:2057] recipient: [55:14:2061] Got start offset = 0 2026-02-19T08:18:42.663606Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 56 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:107:2057] recipient: [56:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:107:2057] recipient: [56:105:2138] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:112:2057] recipient: [56:105:2138] 2026-02-19T08:18:42.743343Z node 56 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:18:42.743417Z node 56 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:18:42.743473Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:42.743536Z node 56 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:153:2057] recipient: [56:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:153:2057] recipient: [56:151:2172] Leader for TabletID 72057594037927938 is [56:157:2176] sender: [56:158:2057] recipient: [56:151:2172] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:181:2057] recipient: [56:14:2061] 2026-02-19T08:18:42.768921Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:42.769933Z node 56 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 56 actor [56:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 56 LocalDC: true Topic: "topic" TopicPath: 
"/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 56 } 2026-02-19T08:18:42.770763Z node 56 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:187:2142] 2026-02-19T08:18:42.775743Z node 56 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [56:187:2142] 2026-02-19T08:18:42.777607Z node 56 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:188:2142] 2026-02-19T08:18:42.779977Z node 56 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [56:188:2142] 2026-02-19T08:18:42.788597Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:42.789037Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|758281bb-7a762d7b-50fc665f-6d31df22_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:18:42.810439Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:42.811048Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|52ea2f64-1d1538f2-3b2743-166782e7_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:18:42.857689Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:18:42.858322Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c3ac1a9b-d46d696e-fa156ae8-4e84c77e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:242:2057] recipient: [56:103:2137] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:245:2057] recipient: [56:244:2240] Leader for TabletID 72057594037927937 is [56:246:2241] sender: [56:247:2057] recipient: [56:244:2240] 2026-02-19T08:18:42.957673Z node 56 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:18:42.957759Z node 56 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:18:42.958632Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:18:42.958681Z node 56 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:18:42.959671Z node 56 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:295:2241] 2026-02-19T08:18:42.961994Z node 56 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:296:2241] 2026-02-19T08:18:42.970174Z node 56 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already 
initialized. 2026-02-19T08:18:42.970469Z node 56 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:18:42.971124Z node 56 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [56:296:2241] 2026-02-19T08:18:42.971403Z node 56 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [56:295:2241] 2026-02-19T08:18:42.982820Z node 56 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-02-19T08:18:42.985226Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 56 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037927937 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [56:246:2241] sender: [56:324:2057] recipient: [56:14:2061] Got start offset = 0 |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TestSqsTopicHttpProxy::TestSetQueueAttributesBasic [GOOD] >> IndexBuildTest::WithFollowersUniq [GOOD] >> IndexBuildTest::RejectsOnDuplicatesUniq >> IndexBuildTest::DropIndexUniq [GOOD] |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |91.6%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> TestSqsTopicHttpProxy::TestSetQueueAttributesFifoImmutable |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndexUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:29.502091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:29.502179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.502250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:29.502308Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:29.502355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:29.502391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:29.502414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:29.502445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:29.502503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.502586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:29.503586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:29.503903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:29.598446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:29.598506Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:29.620033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:29.620739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:29.620955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:29.633704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:29.634423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:29.635157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:29.635465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:29.638549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:29.638730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:29.639888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:29.639946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-02-19T08:18:29.640059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:29.640107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:29.640150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:29.640322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:18:29.809735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.810801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.810927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811259Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.811800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 594046678944, cookie: 105 2026-02-19T08:18:44.447432Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:18:44.447476Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-02-19T08:18:44.447521Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 5 2026-02-19T08:18:44.448477Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.448564Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.448600Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:18:44.448635Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 18446744073709551615 2026-02-19T08:18:44.448672Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-02-19T08:18:44.458658Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.458776Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.458818Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:18:44.458862Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2026-02-19T08:18:44.458908Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:18:44.459797Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.459896Z 
node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.459933Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:18:44.462367Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:18:44.462432Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:18:44.462701Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2026-02-19T08:18:44.462839Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2026-02-19T08:18:44.462876Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2026-02-19T08:18:44.462917Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2026-02-19T08:18:44.462953Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2026-02-19T08:18:44.462995Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2026-02-19T08:18:44.463600Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.463691Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.463726Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:18:44.463888Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.463956Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:18:44.463987Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:18:44.464021Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 45], version: 18446744073709551615 2026-02-19T08:18:44.464062Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 4 2026-02-19T08:18:44.464156Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2026-02-19T08:18:44.465673Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2026-02-19T08:18:44.465798Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:18:44.466039Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2026-02-19T08:18:44.466169Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2026-02-19T08:18:44.466204Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-02-19T08:18:44.466245Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2026-02-19T08:18:44.466275Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-02-19T08:18:44.466312Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2026-02-19T08:18:44.466379Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:819:2774] message: TxId: 105 2026-02-19T08:18:44.466425Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-02-19T08:18:44.466469Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-02-19T08:18:44.466501Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 105:0 2026-02-19T08:18:44.466610Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-02-19T08:18:44.466651Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:1 2026-02-19T08:18:44.466675Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 105:1 2026-02-19T08:18:44.466706Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 2 2026-02-19T08:18:44.466731Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:2 2026-02-19T08:18:44.466752Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 105:2 2026-02-19T08:18:44.466796Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 45] was 2 2026-02-19T08:18:44.467472Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:18:44.468728Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:18:44.468831Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:18:44.468870Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:18:44.471012Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:18:44.471137Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:18:44.471330Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:18:44.471371Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1342:3264] TestWaitNotification: OK eventTxId 105 |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 |91.6%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ClusterBalancing::ClusterBalancingEvenDistribution |91.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true [GOOD] >> VectorIndexBuildTest::CancelBroken |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |91.6%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret >> IndexBuildTest::RejectsCancelUniq [GOOD] >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> CdcStreamChangeCollector::UpsertManyRows >> TDataShardLocksTest::MvccTestWriteBreaksLocks >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> IndexBuildTest::RejectsOnDuplicatesUniq [GOOD] >> VectorIndexBuildTest::BuildTableWithEmptyShard >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityZeroTimeout [GOOD] |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |91.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |91.6%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> TestSqsTopicHttpProxy::TestCreateQueue >> TestSqsTopicHttpProxy::TestSetQueueAttributesRedrivePolicy [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdDlq0Consumers [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessage [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> TestSqsTopicHttpProxy::TestSetQueueAttributesRetentionPeriod >> TestSqsTopicHttpProxy::TestCreateQueueWithCustomConsumer >> VectorIndexBuildTest::CancelBroken [GOOD] >> VectorIndexBuildTest::CreateBuildProposeReject >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdWithConsumersRetentionExtended |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> VectorIndexBuildTest::PrefixedDuplicates [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 >> TestSqsTopicHttpProxy::TestSetQueueAttributesFifoImmutable [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] >> TestSqsTopicHttpProxy::TestSetQueueAttributesInvalidValues |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/ut/ydb-core-kqp-common-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/common/ut/ydb-core-kqp-common-ut |91.6%| [LD] {RESULT} $(B)/ydb/core/kqp/common/ut/ydb-core-kqp-common-ut >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] >> VectorIndexBuildTest::GlobalLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:29.367790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:29.367878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.367957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:29.368012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:29.368055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:29.368095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:29.368130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:29.368156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: 
ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:29.368240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.368327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:29.369188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:29.369510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:29.484773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:29.484845Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:29.498912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:29.501353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:29.501605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:29.521576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:29.522554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:29.523399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:29.523696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:29.529589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:29.529819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:29.531064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:29.531131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:29.531242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:29.531287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:29.531348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:29.531554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-02-19T08:18:29.681958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.683521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.683659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.683729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.683875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.683939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.684041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.684134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.684210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.684334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.684411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.684482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.684585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.684655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
DROP LOCK, path: /MyRoot/ServerLessDB/Table/test_index/indexImplTable 2026-02-19T08:18:53.892341Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7304: Handle: TEvModifySchemeTransactionResult: txId# 281474976725762, status# StatusPathDoesNotExist 2026-02-19T08:18:53.892422Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7306: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549 2026-02-19T08:18:53.892539Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3154: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 107, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2026-02-19T08:18:53.892696Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3159: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500), SubscribersCount: 1, CreateSender: [6:1664:3531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2026-02-19T08:18:53.892872Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-02-19T08:18:53.895094Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Rejected 2026-02-19T08:18:53.895241Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 107 Rejected TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejected, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/Table/test_index', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760, SubscribersCount: 1, CreateSender: [6:1664:3531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:18:53.895297Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-02-19T08:18:53.895479Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:18:53.895541Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:1681:3548] TestWaitNotification: OK eventTxId 107 2026-02-19T08:18:53.896352Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-02-19T08:18:53.896682Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, 
LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-02-19T08:18:53.897670Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:18:53.899913Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 2.25ms result status StatusSuccess 2026-02-19T08:18:53.900539Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 
MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-02-19T08:18:53.901515Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__forget.cpp:18: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: DoExecute TxId: 108 DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-02-19T08:18:53.901915Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.h:102: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: Reply TxId: 108 Status: SUCCESS BUILDINDEX RESPONSE Forget: NKikimrIndexBuilder.TEvForgetResponse TxId: 108 Status: SUCCESS 2026-02-19T08:18:53.905926Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__list.cpp:23: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" PageSize: 100 PageToken: "" 2026-02-19T08:18:53.906067Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |91.6%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> TestSqsTopicHttpProxy::TestCreateQueue [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:36.565544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:36.565624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:36.565686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:36.565750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:36.565797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:36.565836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:36.565868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:36.565892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:36.565963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:36.566455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:36.567303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:36.567573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:36.650544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:36.650602Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:36.665431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:36.666105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:36.666296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:36.677346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:36.678041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:36.678781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:36.679094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:36.682108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:36.682310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:36.683548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-02-19T08:18:36.683625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:36.683738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:36.683784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:36.683825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:36.684018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:18:36.832820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.833867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.834970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:36.835045Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... ellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1664:3531], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 11298, Billed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 11298} 2026-02-19T08:18:56.833440Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-02-19T08:18:56.833590Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:18:56.833675Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:2329:4102] TestWaitNotification: OK eventTxId 107 2026-02-19T08:18:56.834649Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:18:56.834988Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 386us result status StatusSuccess 2026-02-19T08:18:56.835643Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "prefix" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 33 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: 
"embedding" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 4 } clusters: 20 levels: 2 overlap_clusters: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-02-19T08:18:56.836698Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72075186233409549 2026-02-19T08:18:56.837653Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" took 934us result status StatusSuccess 2026-02-19T08:18:56.839125Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 35 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976725758 CreateStep: 400 ParentPathId: 33 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { 
Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "pool-kind-1" } Log { PreferredPoolKind: "pool-kind-1" } Data { PreferredPoolKind: "pool-kind-1" } } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 
ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 35 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-02-19T08:18:56.864712Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 268830210, Sender [6:3445:5166], Recipient [6:1743:3603]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "(\n (let range \'(\'(\'__ydb_parent (Null) (Void)) \'(\'key (Null) (Void)) ))\n (let columns \'(\'__ydb_parent))\n (let result (SelectRange \'__user__indexImplPostingTable range columns \'()))\n (return (AsList (SetResult \'Result result) ))\n )" } } ... posting table contains 10 rows >> TestSqsTopicHttpProxy::TestCreateQueueFifo >> AsyncIndexChangeCollector::UpsertToSameKey >> TestSqsTopicHttpProxy::TestCreateQueueWithCustomConsumer [GOOD] >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::BaseCaseUniq >> TestSqsTopicHttpProxy::TestDeleteMessageBatch >> TestSqsTopicHttpProxy::TestSetQueueAttributesRetentionPeriod [GOOD] >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] >> TestSqsTopicHttpProxy::TestCreateQueueWithSameNameAndSameParams >> VectorIndexBuildTest::GlobalLimit [GOOD] >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdWithConsumersRetentionExtended [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] Test command err: RandomSeed# 8030795830309354283 2026-02-19T08:18:54.049629Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:54.052113Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13380182706131286011] 2026-02-19T08:18:54.085966Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:54.254089Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:54.256071Z 
8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1943411322695137402] 2026-02-19T08:18:54.276885Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:54.366497Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:54.368547Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4283608843403547650] 2026-02-19T08:18:54.381198Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:54.521288Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:54.523345Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16506672060400012409] 2026-02-19T08:18:54.535839Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:55.087487Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:55.089495Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6467921490378060617] 2026-02-19T08:18:55.100415Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:55.222487Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:55.224525Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7470223956017185259] 2026-02-19T08:18:55.243842Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:55.351716Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:55.353816Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4858964991823441009] 2026-02-19T08:18:55.367401Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:55.541738Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:55.543911Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) 
TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17082957697818982693] 2026-02-19T08:18:55.564951Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:56.339423Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:56.341599Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8138251784202862391] 2026-02-19T08:18:56.353859Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:56.475198Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:56.477366Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17350969067678436112] 2026-02-19T08:18:56.507579Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:56.617009Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:56.619151Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16012917162132871624] 2026-02-19T08:18:56.632726Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:56.769965Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:56.771643Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10305772004676206668] 2026-02-19T08:18:56.782932Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:57.507094Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:57.509658Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10218141994620521406] 2026-02-19T08:18:57.522153Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:57.663156Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:57.664791Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8020461422901469836] 
2026-02-19T08:18:57.689988Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:57.812632Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:57.815141Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2304658143017970336] 2026-02-19T08:18:57.846775Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2026-02-19T08:18:58.104924Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:18:58.107516Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17348530078429426336] 2026-02-19T08:18:58.125542Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdWithConsumersRetentionShrinked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::GlobalLimit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:27.704169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:27.704264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:27.704326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:27.704380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:27.704423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:27.704459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:27.704488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:27.704512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:18:27.704568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:27.704661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:27.705447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:27.705774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:27.795102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:27.795171Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:27.807573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:27.808301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:27.808514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:27.820100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:27.820861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:27.821609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:27.821985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:27.825401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:27.825588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:27.826849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:27.826914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:27.827035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:27.827078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:27.827117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:27.827298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:18:27.947477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.948441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.948594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.948677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.948836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.948896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.948983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.949061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.949157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.949237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.949304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.949376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.949461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:27.949521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
SS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-02-19T08:18:59.179836Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: idx_global, IndexColumn: embedding, State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At CreateBuild state got unsuccess propose result, status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/Table/idx_global/indexImplPostingTable0build', error: paths count limit exceeded, limit: 4, paths: 4, delta: 1, SubscribersCount: 1, CreateSender: [6:1082:2984], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976735761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:18:59.180181Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976735761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976735761 msg type: 269090816 2026-02-19T08:18:59.180329Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976735761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976735761 at step: 5000042 FAKE_COORDINATOR: advance: minStep5000042 State->FrontStep: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976735761 at step: 5000042 2026-02-19T08:18:59.180612Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976735761, at schemeshard: 72057594046678944 2026-02-19T08:18:59.180647Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976735761, ready parts: 0/1, is published: true 2026-02-19T08:18:59.180683Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976735761, at schemeshard: 72057594046678944 2026-02-19T08:18:59.180824Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:59.180910Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735761 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 25769805926 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:18:59.180961Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 
281474976735761:0 HandleReply TEvOperationPlan: step# 5000042 2026-02-19T08:18:59.181029Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976735761:0 Propose -> Done 2026-02-19T08:18:59.182864Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976735761:0, at schemeshard: 72057594046678944 2026-02-19T08:18:59.182906Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 281474976735761:0 ProgressState 2026-02-19T08:18:59.182975Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976735761:0 progress is 1/1 2026-02-19T08:18:59.183005Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976735761 ready parts: 1/1 2026-02-19T08:18:59.183036Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976735761:0 progress is 1/1 2026-02-19T08:18:59.183057Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976735761 ready parts: 1/1 2026-02-19T08:18:59.183105Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976735761, ready parts: 1/1, is published: true 2026-02-19T08:18:59.183151Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:1023:2937] message: TxId: 281474976735761 2026-02-19T08:18:59.183194Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976735761 ready parts: 1/1 2026-02-19T08:18:59.183226Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976735761:0 2026-02-19T08:18:59.183249Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976735761:0 2026-02-19T08:18:59.183301Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976735761 2026-02-19T08:18:59.184618Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976735761 2026-02-19T08:18:59.184658Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976735761 2026-02-19T08:18:59.184702Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976735761 2026-02-19T08:18:59.184782Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: idx_global, IndexColumn: embedding, State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At CreateBuild state got unsuccess propose result, status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/Table/idx_global/indexImplPostingTable0build', error: paths count limit exceeded, 
limit: 4, paths: 4, delta: 1, SubscribersCount: 1, CreateSender: [6:1082:2984], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976735761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976735761 2026-02-19T08:18:59.186019Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-02-19T08:18:59.186097Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: idx_global, IndexColumn: embedding, State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At CreateBuild state got unsuccess propose result, status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/Table/idx_global/indexImplPostingTable0build', error: paths count limit exceeded, limit: 4, paths: 4, delta: 1, SubscribersCount: 1, CreateSender: [6:1082:2984], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976735761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:18:59.186143Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-02-19T08:18:59.204420Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-02-19T08:18:59.204546Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: idx_global, IndexColumn: embedding, State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At CreateBuild state got unsuccess propose result, status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/Table/idx_global/indexImplPostingTable0build', error: paths count 
limit exceeded, limit: 4, paths: 4, delta: 1, SubscribersCount: 1, CreateSender: [6:1082:2984], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976735761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:18:59.204579Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-02-19T08:18:59.204703Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:18:59.204745Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1286:3143] TestWaitNotification: OK eventTxId 105 >> CdcStreamChangeCollector::InsertSingleRow >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> TestSqsTopicHttpProxy::TestSetQueueAttributesInvalidValues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSetQueueAttributesRetentionPeriod [GOOD] Test command err: 2026-02-19T08:18:11.355671Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487715131662279:2145];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:11.450360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:18:11.955382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:11.987840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:11.987930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:12.227937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:12.342478Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:12.345976Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487715131662172:2081] 1771489091261804 != 1771489091261807 2026-02-19T08:18:12.357836Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:12.381994Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:12.611045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:12.611067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:18:12.611079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:12.611194Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:13.019308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:13.033406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:18:13.389113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:18:13.403660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:18:13.406771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:13.448401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:18:13.619249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.881063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.946778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-02-19T08:18:13.951936Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.025989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-02-19T08:18:14.031024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.100476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.156935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.202189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.279978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.339128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.379338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:16.272170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487715131662279:2145];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:16.272292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:16.677004Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487736606500323:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.677130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.677487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487736606500335:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.677529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487736606500336:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.677640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.684095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:16.696466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2026-02-19T08:18:16.697190Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487736606500339:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:18:16.792935Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487736606500390:3110] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:17.272955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.397543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "ydb-sqs-consumer" Generation: 0 AvailabilityPeriodMs: 3600000 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false } MonitoringProjectId: "" 2026-02-19T08:18:59.378556Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:948: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS Http output full {"QueueUrl":"http://ghrun-fkrokuvqke.auto.internal:8771/v1/5//Root/21/TestSetAttrsRetention/16/ydb-sqs-consumer"} 2026-02-19T08:18:59.386194Z node 6 :SQS INFO: http_req.cpp:1237: Request [6c4d4664-5f31106b-9c672b1a-2e104e37] Request done. Api [SqsTopic] Action [CreateQueue] User [AKIDEXAMPLE] Database [/Root] IP [580b:5240:a97b:0:400b:5240:a97b:0] Duration [86ms] Code [200] 2026-02-19T08:18:59.386226Z node 6 :HTTP_PROXY INFO: http_req.cpp:1667: http request [CreateQueue] requestId [6c4d4664-5f31106b-9c672b1a-2e104e37] reply ok 2026-02-19T08:18:59.386474Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:33756) <- (200 , 113 bytes) 2026-02-19T08:18:59.386562Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:33756) connection closed 2026-02-19T08:18:59.392106Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#40,[::1]:33758) incoming connection opened 2026-02-19T08:18:59.392179Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#40,[::1]:33758) -> (POST /Root, 142 bytes) 2026-02-19T08:18:59.392359Z node 6 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [5898:9740:a97b:0:4098:9740:a97b:0] request [SetQueueAttributes] url [/Root] database [/Root] requestId: f6f7ca9b-bc2a343b-ece50dec-fab5b756 2026-02-19T08:18:59.392791Z node 6 :HTTP_PROXY INFO: http_req.cpp:1385: http request [SetQueueAttributes] requestId [f6f7ca9b-bc2a343b-ece50dec-fab5b756] got new request from [5898:9740:a97b:0:4098:9740:a97b:0] database '/Root' stream '' 2026-02-19T08:18:59.393286Z node 6 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SetQueueAttributes] requestId [f6f7ca9b-bc2a343b-ece50dec-fab5b756] [auth] Authorized successfully 2026-02-19T08:18:59.393341Z node 6 :HTTP_PROXY INFO: http_req.cpp:1117: http request [SetQueueAttributes] requestId [f6f7ca9b-bc2a343b-ece50dec-fab5b756] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:18:59.395936Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:18:59.399665Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3668: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-02-19T08:18:59.401188Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1060: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-02-19T08:18:59.403979Z node 6 :PERSQUEUE DEBUG: partition.cpp:1295: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1771489139448, TxId 281474976710691 2026-02-19T08:18:59.404043Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:59.404065Z node 6 :PERSQUEUE DEBUG: partition.cpp:2445: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-02-19T08:18:59.404101Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:18:59.404115Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:59.404130Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:18:59.404141Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:18:59.404394Z node 6 :PERSQUEUE DEBUG: partition.cpp:1439: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1771489139448, TxId 281474976710691 2026-02-19T08:18:59.404433Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:59.404446Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:18:59.404457Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:59.404482Z node 6 :PERSQUEUE DEBUG: partition.cpp:2509: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-02-19T08:18:59.404625Z node 6 :PERSQUEUE DEBUG: partition.cpp:3984: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976710691 2026-02-19T08:18:59.404639Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:18:59.404654Z node 6 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:18:59.404671Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:18:59.404872Z node 6 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-02-19T08:18:59.406348Z node 6 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:18:59.406562Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-02-19T08:18:59.406576Z node 6 :PERSQUEUE INFO: partition_mlp.cpp:156: [72075186224037907][Partition][0][StateIdle] Updating MLP consumer 'ydb-sqs-consumer' config 2026-02-19T08:18:59.406790Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:18:59.406811Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:59.406826Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:59.406837Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:59.406851Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:59.406861Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:18:59.406880Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:18:59.407287Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 7200 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "TestSetAttrsRetention" Version: 2 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { Ids: 0 Codecs: "raw" } TopicPath: "/Root/TestSetAttrsRetention" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037907 } Consumers { Name: "ydb-sqs-consumer" Generation: 0 AvailabilityPeriodMs: 7200000 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false } MonitoringProjectId: "" 2026-02-19T08:18:59.407324Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:948: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS Http output full {} 2026-02-19T08:18:59.414875Z node 6 :SQS INFO: http_req.cpp:1237: Request [f6f7ca9b-bc2a343b-ece50dec-fab5b756] Request done. 
Api [SqsTopic] Action [SetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [TestSetAttrsRetention] IP [5898:9740:a97b:0:4098:9740:a97b:0] Duration [22ms] Code [200] 2026-02-19T08:18:59.414914Z node 6 :HTTP_PROXY INFO: http_req.cpp:1667: http request [SetQueueAttributes] requestId [f6f7ca9b-bc2a343b-ece50dec-fab5b756] reply ok 2026-02-19T08:18:59.415173Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#40,[::1]:33758) <- (200 , 2 bytes) 2026-02-19T08:18:59.415288Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#40,[::1]:33758) connection closed 2026-02-19T08:18:59.416632Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#40,[::1]:33772) incoming connection opened 2026-02-19T08:18:59.416710Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#40,[::1]:33772) -> (POST /Root, 139 bytes) 2026-02-19T08:18:59.416850Z node 6 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f8ea:8d3f:a97b:0:e0ea:8d3f:a97b:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: 8322ddfe-dee56c97-e9425c70-f012fd55 2026-02-19T08:18:59.417217Z node 6 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [8322ddfe-dee56c97-e9425c70-f012fd55] got new request from [f8ea:8d3f:a97b:0:e0ea:8d3f:a97b:0] database '/Root' stream '' 2026-02-19T08:18:59.417721Z node 6 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [8322ddfe-dee56c97-e9425c70-f012fd55] [auth] Authorized successfully 2026-02-19T08:18:59.417767Z node 6 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [8322ddfe-dee56c97-e9425c70-f012fd55] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"Attributes":{"MessageRetentionPeriod":"7200"}} 2026-02-19T08:18:59.418778Z node 6 :SQS INFO: http_req.cpp:1237: Request [8322ddfe-dee56c97-e9425c70-f012fd55] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [TestSetAttrsRetention] IP [f8ea:8d3f:a97b:0:e0ea:8d3f:a97b:0] Duration [4ms] Code [200] 2026-02-19T08:18:59.418803Z node 6 :HTTP_PROXY INFO: http_req.cpp:1667: http request [GetQueueAttributes] requestId [8322ddfe-dee56c97-e9425c70-f012fd55] reply ok 2026-02-19T08:18:59.419000Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#40,[::1]:33772) <- (200 , 48 bytes) 2026-02-19T08:18:59.419072Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#40,[::1]:33772) connection closed 2026-02-19T08:18:59.469806Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:18:59.469843Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:59.469856Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:18:59.469876Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:18:59.469889Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist >> TDataShardLocksTest::UseLocksCache [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> AsyncIndexChangeCollector::InsertSingleRow >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:18:52.850142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:18:52.987254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:18:53.005741Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:18:53.005859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:18:53.006517Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:18:53.364614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:53.364767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:53.438727Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489129830983 != 1771489129830987 2026-02-19T08:18:53.446941Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:53.494076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:53.600374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:18:53.966234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:53.982349Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:54.112990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:54.187785Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:898:2773]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:18:54.188943Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:898:2773]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:18:54.189247Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:898:2773] 2026-02-19T08:18:54.189499Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:18:54.200002Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:884:2765], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:18:54.241431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:898:2773]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:18:54.241944Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:884:2765], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:18:54.242233Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-02-19T08:18:54.242459Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:18:54.251197Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:884:2765], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:18:54.251910Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:18:54.252055Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:18:54.253898Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:18:54.253998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:18:54.254054Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:18:54.254421Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:18:54.254528Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:18:54.254626Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2773] in generation 1 2026-02-19T08:18:54.255143Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:18:54.255238Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:18:54.256497Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:18:54.256555Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:18:54.256625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:18:54.256904Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:18:54.256985Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:18:54.257039Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:930:2775] in generation 1 2026-02-19T08:18:54.268255Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:18:54.316667Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:18:54.316977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:18:54.317169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:933:2794] 2026-02-19T08:18:54.317212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:18:54.317273Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:18:54.317314Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:18:54.317664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:898:2773], Recipient [1:898:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:18:54.317737Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing 
event TEvPrivate::TEvProgressTransaction 2026-02-19T08:18:54.317875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:18:54.317919Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:18:54.318040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:18:54.318127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:934:2795] 2026-02-19T08:18:54.318153Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:18:54.318179Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:18:54.318204Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:18:54.318643Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:900:2775], Recipient [1:900:2775]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:18:54.318681Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:18:54.318861Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:18:54.319000Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:18:54.319172Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:18:54.319220Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:18:54.319273Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:18:54.319314Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:18:54.319349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:18:54.319380Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:18:54.319430Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:18:54.319492Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:18:54.319571Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:18:54.319714Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:903:2776], Recipient [1:898:2773]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:18:54.319751Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:18:54.319807Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# 
[1:891:2769], serverId# [1:903:2776], sessionId# [0:0:0] 2026-02-19T08:18:54.319873Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:18:54.319899Z node 1 :TX_DAT ... 24037888 is Executed 2026-02-19T08:19:02.325483Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:19:02.325511Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037888 has finished 2026-02-19T08:19:02.337057Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-02-19T08:19:02.337126Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:19:02.337164Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:19:02.337216Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 1 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-02-19T08:19:02.337303Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:02.337440Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [2:1160:2955], Recipient [2:902:2776]: {TEvReadSet step# 3500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-02-19T08:19:02.337476Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:19:02.337504Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-02-19T08:19:02.505849Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:1202:2981], Recipient [2:1160:2955]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-02-19T08:19:02.506045Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:19:02.506142Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2026-02-19T08:19:02.506285Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:19:02.506336Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:19:02.506374Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:19:02.506410Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:19:02.506464Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated 
operation [0:2] at 72075186224037888 2026-02-19T08:19:02.506503Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:19:02.506546Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:19:02.506571Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:19:02.506602Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:19:02.506719Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-02-19T08:19:02.507012Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-02-19T08:19:02.507102Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[2:1202:2981], 0} after executionsCount# 1 2026-02-19T08:19:02.507152Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[2:1202:2981], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:19:02.507231Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[2:1202:2981], 0} finished in read 2026-02-19T08:19:02.507306Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:19:02.507335Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:19:02.507359Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:19:02.507383Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:19:02.507430Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:19:02.507471Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:19:02.507502Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-02-19T08:19:02.507564Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:19:02.507685Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:19:02.508451Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:1202:2981], Recipient [2:1160:2955]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:19:02.508515Z node 2 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-02-19T08:19:02.508660Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:1202:2981], Recipient [2:902:2776]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-02-19T08:19:02.508765Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-02-19T08:19:02.508813Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2026-02-19T08:19:02.508891Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-02-19T08:19:02.508922Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2026-02-19T08:19:02.508953Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-02-19T08:19:02.508990Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2026-02-19T08:19:02.509033Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037889 2026-02-19T08:19:02.509076Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-02-19T08:19:02.509100Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-02-19T08:19:02.509121Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2026-02-19T08:19:02.509140Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2026-02-19T08:19:02.509225Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-02-19T08:19:02.509432Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-02-19T08:19:02.509472Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037889 Complete read# {[2:1202:2981], 1} after executionsCount# 1 2026-02-19T08:19:02.509505Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037889 read iterator# {[2:1202:2981], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:19:02.509570Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037889 read iterator# {[2:1202:2981], 1} finished in read 2026-02-19T08:19:02.509625Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is 
Executed 2026-02-19T08:19:02.509649Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2026-02-19T08:19:02.509671Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2026-02-19T08:19:02.510622Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2026-02-19T08:19:02.510711Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-02-19T08:19:02.510762Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2026-02-19T08:19:02.510792Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037889 has finished 2026-02-19T08:19:02.510818Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-02-19T08:19:02.510901Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-02-19T08:19:02.512363Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:1202:2981], Recipient [2:902:2776]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-02-19T08:19:02.512413Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037889 ReadCancel: { ReadId: 1 } >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> AsyncIndexChangeCollector::UpsertSingleRow >> TestSqsTopicHttpProxy::TestCreateQueueFifo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSetQueueAttributesInvalidValues [GOOD] Test command err: 2026-02-19T08:18:12.947484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:12.947601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:18:13.030898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:13.031036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:13.038729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:13.121832Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487718074444898:2081] 1771489092520129 != 1771489092520132 2026-02-19T08:18:13.131049Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not 
loaded 2026-02-19T08:18:13.149775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:13.242364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:13.242384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:18:13.242391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:13.242532Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:13.610144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:13.618803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:18:13.633914Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:14.000985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:18:14.009899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:18:14.011994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:14.027445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:18:14.127510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.260979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.309292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.372219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.409304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.439706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.504355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.551901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.593180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.635317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:16.651317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487735254315749:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.651424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487735254315757:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.651481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.653101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487735254315764:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.653197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.656847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:16.674065Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487735254315763:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:18:16.752268Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487735254315816:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:17.215689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.254038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.285797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.357871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.408637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147 ... 
2182: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:19:02.666324Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-02-19T08:19:02.666338Z node 6 :PERSQUEUE INFO: partition_mlp.cpp:156: [72075186224037907][Partition][0][StateIdle] Updating MLP consumer 'ydb-sqs-consumer' config 2026-02-19T08:19:02.666820Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:19:02.666855Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:02.666872Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:02.666883Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:02.666900Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:02.666912Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:02.666938Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:19:02.667613Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "TestSetAttrsInvalid" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { Ids: 0 Codecs: "raw" } TopicPath: "/Root/TestSetAttrsInvalid" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "ydb-sqs-consumer" Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DefaultProcessingTimeoutSeconds: 30 } MonitoringProjectId: "" 2026-02-19T08:19:02.667688Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:948: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-02-19T08:19:02.682091Z node 6 :SQS INFO: http_req.cpp:1237: Request [961d8c7c-5a8de6a3-db78a536-9f35485d] Request done. 
Api [SqsTopic] Action [CreateQueue] User [AKIDEXAMPLE] Database [/Root] IP [5846:7ce8:8e7b:0:4046:7ce8:8e7b:0] Duration [132ms] Code [200] 2026-02-19T08:19:02.682126Z node 6 :HTTP_PROXY INFO: http_req.cpp:1667: http request [CreateQueue] requestId [961d8c7c-5a8de6a3-db78a536-9f35485d] reply ok 2026-02-19T08:19:02.682419Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:37234) <- (200 , 111 bytes) 2026-02-19T08:19:02.682514Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:37234) connection closed Http output full {"QueueUrl":"http://ghrun-fkrokuvqke.auto.internal:8771/v1/5//Root/19/TestSetAttrsInvalid/16/ydb-sqs-consumer"} 2026-02-19T08:19:02.686953Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:37246) incoming connection opened 2026-02-19T08:19:02.687110Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:37246) -> (POST /Root, 133 bytes) 2026-02-19T08:19:02.687489Z node 6 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b812:65e8:8e7b:0:a012:65e8:8e7b:0] request [SetQueueAttributes] url [/Root] database [/Root] requestId: 8aea3d5a-7088ee89-cc7532bd-217ddb88 2026-02-19T08:19:02.687986Z node 6 :HTTP_PROXY INFO: http_req.cpp:1385: http request [SetQueueAttributes] requestId [8aea3d5a-7088ee89-cc7532bd-217ddb88] got new request from [b812:65e8:8e7b:0:a012:65e8:8e7b:0] database '/Root' stream '' 2026-02-19T08:19:02.688593Z node 6 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SetQueueAttributes] requestId [8aea3d5a-7088ee89-cc7532bd-217ddb88] [auth] Authorized successfully 2026-02-19T08:19:02.688643Z node 6 :HTTP_PROXY INFO: http_req.cpp:1117: http request [SetQueueAttributes] requestId [8aea3d5a-7088ee89-cc7532bd-217ddb88] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:19:02.693842Z node 6 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [SetQueueAttributes] requestId [8aea3d5a-7088ee89-cc7532bd-217ddb88] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-02-19T08:19:02.694001Z node 6 :SQS INFO: http_req.cpp:1237: Request [8aea3d5a-7088ee89-cc7532bd-217ddb88] Request done. 
Api [SqsTopic] Action [SetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [TestSetAttrsInvalid] IP [b812:65e8:8e7b:0:a012:65e8:8e7b:0] Duration [6ms] Code [400] Response [Invalid VisibilityTimeout] 2026-02-19T08:19:02.694021Z node 6 :HTTP_PROXY INFO: http_req.cpp:1671: http request [SetQueueAttributes] requestId [8aea3d5a-7088ee89-cc7532bd-217ddb88] reply with status: STATUS_UNDEFINED message: Invalid VisibilityTimeout 2026-02-19T08:19:02.694352Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:37246) <- (400 InvalidParameterValue, 72 bytes) 2026-02-19T08:19:02.694413Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:37246) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "Attributes": { "VisibilityTimeout":"-1" }, "QueueUrl":"/v1/5//Root/19/TestSetAttrsInvalid/16/ydb-sqs-consumer" } 2026-02-19T08:19:02.694454Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:37246) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 8aea3d5a-7088ee89-cc7532bd-217ddb88 Content-Type: application/x-amz-json-1.1 Content-Length: 72 2026-02-19T08:19:02.694554Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:37246) connection closed Http output full {"__type":"InvalidParameterValue","message":"Invalid VisibilityTimeout"} 2026-02-19T08:19:02.696097Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:37262) incoming connection opened 2026-02-19T08:19:02.696223Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:37262) -> (POST /Root, 129 bytes) 2026-02-19T08:19:02.696399Z node 6 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [189f:dee8:8e7b:0:9f:dee8:8e7b:0] request [SetQueueAttributes] url [/Root] database [/Root] requestId: d4f32462-56ac49b5-d8a5f0b3-a6fe832b 2026-02-19T08:19:02.696855Z node 6 :HTTP_PROXY INFO: http_req.cpp:1385: http request [SetQueueAttributes] requestId [d4f32462-56ac49b5-d8a5f0b3-a6fe832b] got new request from [189f:dee8:8e7b:0:9f:dee8:8e7b:0] database '/Root' stream '' 2026-02-19T08:19:02.697489Z node 6 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SetQueueAttributes] requestId [d4f32462-56ac49b5-d8a5f0b3-a6fe832b] [auth] Authorized successfully 2026-02-19T08:19:02.697545Z node 6 :HTTP_PROXY INFO: http_req.cpp:1117: http request [SetQueueAttributes] requestId [d4f32462-56ac49b5-d8a5f0b3-a6fe832b] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:19:02.698660Z node 6 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [SetQueueAttributes] requestId [d4f32462-56ac49b5-d8a5f0b3-a6fe832b] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-02-19T08:19:02.698763Z node 6 :SQS INFO: http_req.cpp:1237: Request [d4f32462-56ac49b5-d8a5f0b3-a6fe832b] Request done. 
Api [SqsTopic] Action [SetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [TestSetAttrsInvalid] IP [189f:dee8:8e7b:0:9f:dee8:8e7b:0] Duration [2ms] Code [400] Response [DelaySeconds exceeds maximum of 900 seconds] 2026-02-19T08:19:02.698780Z node 6 :HTTP_PROXY INFO: http_req.cpp:1671: http request [SetQueueAttributes] requestId [d4f32462-56ac49b5-d8a5f0b3-a6fe832b] reply with status: STATUS_UNDEFINED message: DelaySeconds exceeds maximum of 900 seconds 2026-02-19T08:19:02.699030Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:37262) <- (400 InvalidParameterValue, 90 bytes) 2026-02-19T08:19:02.699101Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:37262) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "Attributes": { "DelaySeconds":"901" }, "QueueUrl":"/v1/5//Root/19/TestSetAttrsInvalid/16/ydb-sqs-consumer" } 2026-02-19T08:19:02.699134Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:37262) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: d4f32462-56ac49b5-d8a5f0b3-a6fe832b Content-Type: application/x-amz-json-1.1 Content-Length: 90 2026-02-19T08:19:02.699207Z node 6 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:37262) connection closed Http output full {"__type":"InvalidParameterValue","message":"DelaySeconds exceeds maximum of 900 seconds"} 2026-02-19T08:19:02.761828Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:02.761865Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:02.761882Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:02.761901Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:02.761915Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:02.869821Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:02.869860Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:02.869876Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:02.869895Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:02.869910Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:02.969930Z node 6 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:02.969965Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:02.969978Z node 6 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 
2026-02-19T08:19:02.969995Z node 6 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:02.970007Z node 6 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2026-02-19T08:17:56.983801Z :ReadSession INFO: Random seed for debugging is 1771489076983765 2026-02-19T08:17:57.870456Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487651003965456:2162];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:57.870517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:57.938226Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:17:57.938475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:17:57.995387Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:17:58.431230Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:58.431496Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:58.463053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:58.515017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:58.515179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:58.517053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:58.517118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:58.525139Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:17:58.525320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:58.615594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:58.647004Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:58.711408Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:58.796078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:58.836624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/00394f/r3tmp/yandexOQKdEo.tmp 2026-02-19T08:17:58.836657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/00394f/r3tmp/yandexOQKdEo.tmp 2026-02-19T08:17:58.836828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/00394f/r3tmp/yandexOQKdEo.tmp 2026-02-19T08:17:58.836961Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:58.895434Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:58.922365Z INFO: TTestServer started on Port 17590 GrpcPort 5377 2026-02-19T08:17:58.967926Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:5377 2026-02-19T08:17:59.198744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:01.997543Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487668131688531:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:01.997623Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487668131688556:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:01.997700Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:01.998682Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487668131688561:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:01.998757Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:02.006166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:02.049054Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487668131688560:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-02-19T08:18:02.447913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:02.451872Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487672426655886:2141] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:02.465178Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608487672478803168:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:18:02.467876Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NjkzZTU1YzEtMmYxMjI2NGItZGQ3YjVlYzMtOWQzNjRhMTM=, ActorId: [1:7608487672478803132:2335], ActorState: ExecuteState, LegacyTraceId: 01khtfmep37ctnp2h0thqc4821, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:18:02.475907Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:18:02.483656Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [2:7608487672426655895:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:18:02.484066Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=2&id=MjA5YjlhMzktOTIyMWZiYWYtOTFiN2MwNDktYTBjNTAyNDA=, ActorId: [2:7608487668131688528:2305], ActorState: ExecuteState, LegacyTraceId: 01khtfmema0tk168ev5hd0c3c8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:18:02.484441Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbSta ... am-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-02-19T08:19:01.655532Z :INFO: [/Root] [/Root] [3737c788-f865c608-7c7dd969-784e2f75] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1645 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:19:01.656262Z :INFO: [/Root] [/Root] [3737c788-f865c608-7c7dd969-784e2f75] Closing read session. Close timeout: 0.000000s 2026-02-19T08:19:01.656315Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-02-19T08:19:01.656364Z :INFO: [/Root] [/Root] [3737c788-f865c608-7c7dd969-784e2f75] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1646 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:19:01.656481Z :NOTICE: [/Root] [/Root] [3737c788-f865c608-7c7dd969-784e2f75] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:19:01.673482Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_4563970802672839975_v1 2026-02-19T08:19:01.673527Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037892] server disconnected, pipe [7:7608487922690202075:2505] destroyed 2026-02-19T08:19:01.673580Z node 8 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_7_1_4563970802672839975_v1 2026-02-19T08:19:01.672171Z node 7 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_7_1_4563970802672839975_v1 grpc read done: success# 0, data# { } 2026-02-19T08:19:01.672203Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_7_1_4563970802672839975_v1 grpc read failed 2026-02-19T08:19:01.672236Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_7_1_4563970802672839975_v1 grpc closed 2026-02-19T08:19:01.672280Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_7_1_4563970802672839975_v1 is DEAD 2026-02-19T08:19:01.673238Z node 7 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [7:7608487922690202072:2502] disconnected. 2026-02-19T08:19:01.673265Z node 7 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [7:7608487922690202072:2502] disconnected; active server actors: 1 2026-02-19T08:19:01.673284Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [7:7608487922690202072:2502] client user disconnected session shared/user_7_1_4563970802672839975_v1 2026-02-19T08:19:01.750637Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:01.750668Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:01.750680Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:01.750699Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:01.750710Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:19:01.854895Z node 8 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:01.854940Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:01.854959Z node 8 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:01.854986Z node 8 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:01.854999Z node 8 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:19:02.137995Z node 7 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1270} 
ActorId: [7:7608487931280136844:2528] TxId: 281474976710683. Ctx: { TraceId: 01khtfp8ne33whnh274nc0mdtj, Database: /Root, SessionId: ydb://session/3?node_id=7&id=YmYxYmIzZDQtNmUyOWFkODctYjdmMmE3N2EtNmZlZDQ5Y2U=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 8 trace_id# 2026-02-19T08:19:02.138136Z node 7 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [7:7608487931280136848:2528], TxId: 281474976710683, task: 3. Ctx: { CheckpointId : . TraceId : 01khtfp8ne33whnh274nc0mdtj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmYxYmIzZDQtNmUyOWFkODctYjdmMmE3N2EtNmZlZDQ5Y2U=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [7:7608487931280136844:2528], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2026-02-19T08:19:04.046996Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.047040Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.047123Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:19:04.047475Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:19:04.048858Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:19:04.049129Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.049668Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2026-02-19T08:19:04.051437Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.051490Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.051525Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:19:04.051875Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:19:04.052408Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:19:04.052576Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.052833Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:19:04.054214Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:19:04.054876Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2026-02-19T08:19:04.054992Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2026-02-19T08:19:04.055305Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:19:04.055358Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:19:04.055396Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:19:04.055463Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-02-19T08:19:04.059980Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.060021Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.060070Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:19:04.061200Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:19:04.061733Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:19:04.061934Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.062355Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:19:04.063265Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.066018Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:19:04.066208Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:19:04.066305Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-02-19T08:19:04.066424Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2026-02-19T08:19:04.071699Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.071814Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.071879Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:19:04.072310Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:19:04.072768Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:19:04.072937Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.074026Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:04.074194Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:19:04.074287Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:19:04.074377Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |91.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:17:56.251618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:17:56.251767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:56.251850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:17:56.251904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:17:56.251968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:17:56.252014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:17:56.252054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:17:56.252083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:17:56.252176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:17:56.252260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:17:56.253257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:17:56.253587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:17:56.357257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:17:56.357340Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:56.385125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:17:56.389803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:17:56.390079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:17:56.421247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:17:56.422165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:17:56.423147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:17:56.423489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:17:56.428211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:56.428456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:17:56.429992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:17:56.430082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:17:56.430214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:17:56.430271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:17:56.430326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:17:56.430524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:17:56.631280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.632391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.632520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.632630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.632730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.632806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.632864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.632931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.633013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.633086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:17:56.633169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.633231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.633332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:17:56.633424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
HARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:19:04.184835Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:19:04.184934Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:19:04.185025Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:19:04.191289Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:19:04.191393Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:19:04.191527Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:19:04.194171Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:19:04.194247Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:19:04.194361Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:19:04.194485Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:19:04.194797Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:19:04.197511Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:19:04.197780Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:19:04.198360Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:04.198583Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 158913792111 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:19:04.198686Z node 37 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:19:04.199124Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:19:04.199233Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:19:04.199652Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:19:04.199784Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:19:04.203592Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:19:04.203698Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:19:04.204128Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:04.204216Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [37:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:19:04.204393Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:19:04.204491Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:19:04.204762Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:19:04.204843Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:19:04.204927Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:19:04.204998Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:19:04.205090Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:19:04.205189Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:19:04.205271Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:19:04.205341Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:19:04.205481Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 
2026-02-19T08:19:04.205565Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:19:04.205644Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:19:04.207338Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:19:04.207584Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:19:04.207672Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:19:04.207749Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:19:04.207850Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:19:04.208031Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:19:04.211898Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:19:04.212694Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:19:04.214240Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [37:686:2675] Bootstrap 2026-02-19T08:19:04.217582Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [37:686:2675] Become StateWork (SchemeCache [37:691:2680]) 2026-02-19T08:19:04.221452Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:19:04.222222Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:19:04.222468Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 
2026-02-19T08:19:04.223444Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2026-02-19T08:19:04.225586Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [37:686:2675] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:19:04.229163Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:19:04.229662Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-02-19T08:19:04.231276Z node 37 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TestSqsTopicHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TestSqsTopicHttpProxy::TestCreateQueueExistingTopicWithSharedConsumer >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> TestSqsTopicHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> AsyncIndexChangeCollector::DeleteNothing >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk >> DataShardVolatile::DistributedWrite >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate >> DataShardVolatile::DistributedWriteThenDropTable >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdWithConsumersRetentionShrinked [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> 
AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows |91.7%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |91.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueAttributesStdWithConsumersRetentionShrinked [GOOD] Test command err: 2026-02-19T08:18:11.384323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487711167215737:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:11.384479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:18:11.924661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:12.025412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:12.025517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:12.054655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:12.100077Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:12.105917Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487711167215700:2081] 1771489091382061 != 1771489091382064 2026-02-19T08:18:12.301841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:12.380262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:12.380285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-02-19T08:18:12.380295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:12.380384Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:12.397209Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:12.793273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:12.805243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:18:13.053893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:18:13.071423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:18:13.079529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:13.106650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:18:13.254068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.533905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.608701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.673913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-02-19T08:18:13.778796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.834741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.895597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.954834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.991296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.028522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.063874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:16.289576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487732642053847:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.289722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.290469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487732642053859:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.290548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487732642053860:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.290663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.295230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:16.327380Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487732642053863:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:18:16.384644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487711167215737:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:16.384746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:16.413650Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487732642053914:3110] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:16.895711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:16.988579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.078287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but pr ... tes] requestId [d54745cd-58a353d4-3aff2b2d-29bc4ae] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:19:09.439296Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [GetQueueAttributes] requestId [d54745cd-58a353d4-3aff2b2d-29bc4ae] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-02-19T08:19:09.439411Z node 7 :SQS INFO: http_req.cpp:1237: Request [d54745cd-58a353d4-3aff2b2d-29bc4ae] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [f885:6ecd:ad7b:0:e085:6ecd:ad7b:0] Duration [3ms] Code [400] Response [Invalid attribute name "UnknownAttribute"] 2026-02-19T08:19:09.439431Z node 7 :HTTP_PROXY INFO: http_req.cpp:1671: http request [GetQueueAttributes] requestId [d54745cd-58a353d4-3aff2b2d-29bc4ae] reply with status: STATUS_UNDEFINED message: Invalid attribute name "UnknownAttribute" 2026-02-19T08:19:09.439686Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#38,[::1]:45622) <- (400 InvalidParameterValue, 90 bytes) 2026-02-19T08:19:09.439749Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#38,[::1]:45622) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.GetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "AttributeNames": [ "UnknownAttribute" ], "QueueUrl":"/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-0" } 2026-02-19T08:19:09.439780Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#38,[::1]:45622) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: d54745cd-58a353d4-3aff2b2d-29bc4ae Content-Type: application/x-amz-json-1.1 Content-Length: 90 2026-02-19T08:19:09.439846Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#38,[::1]:45622) connection closed Http output full {"__type":"InvalidParameterValue","message":"Invalid attribute name \"UnknownAttribute\""} 2026-02-19T08:19:09.441225Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#38,[::1]:45638) incoming connection opened 2026-02-19T08:19:09.441304Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#38,[::1]:45638) -> (POST /Root, 143 bytes) 2026-02-19T08:19:09.441498Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b8c9:70cd:ad7b:0:a0c9:70cd:ad7b:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: c9fc217b-5b34270e-80f2dda7-ac39422b 2026-02-19T08:19:09.441949Z node 7 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [c9fc217b-5b34270e-80f2dda7-ac39422b] got new request from [b8c9:70cd:ad7b:0:a0c9:70cd:ad7b:0] database '/Root' stream '' 2026-02-19T08:19:09.449161Z node 7 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [c9fc217b-5b34270e-80f2dda7-ac39422b] [auth] Authorized successfully 2026-02-19T08:19:09.449277Z node 7 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [c9fc217b-5b34270e-80f2dda7-ac39422b] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:19:09.454437Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [GetQueueAttributes] requestId [c9fc217b-5b34270e-80f2dda7-ac39422b] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-02-19T08:19:09.454588Z node 7 :SQS INFO: http_req.cpp:1237: Request [c9fc217b-5b34270e-80f2dda7-ac39422b] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [b8c9:70cd:ad7b:0:a0c9:70cd:ad7b:0] Duration [13ms] Code [400] Response [Invalid attribute name "UnknownAttribute"] 2026-02-19T08:19:09.454609Z node 7 :HTTP_PROXY INFO: http_req.cpp:1671: http request [GetQueueAttributes] requestId [c9fc217b-5b34270e-80f2dda7-ac39422b] reply with status: STATUS_UNDEFINED message: Invalid attribute name "UnknownAttribute" 2026-02-19T08:19:09.454919Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#38,[::1]:45638) <- (400 InvalidParameterValue, 90 bytes) 2026-02-19T08:19:09.454988Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#38,[::1]:45638) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.GetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "AttributeNames": [ "All", "UnknownAttribute" ], "QueueUrl":"/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-0" } 2026-02-19T08:19:09.455025Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#38,[::1]:45638) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: c9fc217b-5b34270e-80f2dda7-ac39422b Content-Type: application/x-amz-json-1.1 Content-Length: 90 2026-02-19T08:19:09.455156Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#38,[::1]:45638) connection closed Http output full {"__type":"InvalidParameterValue","message":"Invalid attribute name \"UnknownAttribute\""} 2026-02-19T08:19:09.457356Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#38,[::1]:45654) incoming connection opened 2026-02-19T08:19:09.457437Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#38,[::1]:45654) -> (POST /Root, 152 bytes) 2026-02-19T08:19:09.457633Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f8f8:61cd:ad7b:0:e0f8:61cd:ad7b:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: a6accde2-f4ef815f-45d5a403-da0cd4e3 2026-02-19T08:19:09.458095Z node 7 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [a6accde2-f4ef815f-45d5a403-da0cd4e3] got new request from [f8f8:61cd:ad7b:0:e0f8:61cd:ad7b:0] database '/Root' stream '' 2026-02-19T08:19:09.458772Z node 7 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [a6accde2-f4ef815f-45d5a403-da0cd4e3] [auth] Authorized successfully 2026-02-19T08:19:09.458821Z node 7 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [a6accde2-f4ef815f-45d5a403-da0cd4e3] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:19:09.459866Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [GetQueueAttributes] requestId [a6accde2-f4ef815f-45d5a403-da0cd4e3] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-02-19T08:19:09.459969Z node 7 :SQS INFO: http_req.cpp:1237: Request [a6accde2-f4ef815f-45d5a403-da0cd4e3] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [f8f8:61cd:ad7b:0:e0f8:61cd:ad7b:0] Duration [2ms] Code [400] Response [Invalid attribute name "UnknownAttribute"] 2026-02-19T08:19:09.459986Z node 7 :HTTP_PROXY INFO: http_req.cpp:1671: http request [GetQueueAttributes] requestId [a6accde2-f4ef815f-45d5a403-da0cd4e3] reply with status: STATUS_UNDEFINED message: Invalid attribute name "UnknownAttribute" 2026-02-19T08:19:09.460257Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#38,[::1]:45654) <- (400 InvalidParameterValue, 90 bytes) 2026-02-19T08:19:09.460307Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#38,[::1]:45654) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.GetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "AttributeNames": [ "DelaySeconds", "UnknownAttribute" ], "QueueUrl":"/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-0" } 2026-02-19T08:19:09.460339Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#38,[::1]:45654) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: a6accde2-f4ef815f-45d5a403-da0cd4e3 Content-Type: application/x-amz-json-1.1 Content-Length: 90 2026-02-19T08:19:09.460412Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#38,[::1]:45654) connection closed Http output full {"__type":"InvalidParameterValue","message":"Invalid attribute name \"UnknownAttribute\""} 2026-02-19T08:19:09.461783Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#38,[::1]:45662) incoming connection opened 2026-02-19T08:19:09.461890Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#38,[::1]:45662) -> (POST /Root, 501 bytes) 2026-02-19T08:19:09.462063Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [78cf:6ecd:ad7b:0:60cf:6ecd:ad7b:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: 81ff4bf2-2183ad19-8e9b83d5-fa00079a 2026-02-19T08:19:09.462546Z node 7 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [81ff4bf2-2183ad19-8e9b83d5-fa00079a] got new request from [78cf:6ecd:ad7b:0:60cf:6ecd:ad7b:0] database '/Root' stream '' 2026-02-19T08:19:09.463075Z node 7 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [81ff4bf2-2183ad19-8e9b83d5-fa00079a] [auth] Authorized successfully 2026-02-19T08:19:09.463138Z node 7 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [81ff4bf2-2183ad19-8e9b83d5-fa00079a] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"Attributes":{"FifoQueue":"false","MaximumMessageSize":"262144","DelaySeconds":"0","CreatedTimestamp":"1771489149","VisibilityTimeout":"25","RedrivePolicy":"null","QueueArn":"yrn:yc:ymq:ru-central1::/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-0","ApproximateNumberOfMessagesDelayed":"0","ApproximateNumberOfMessages":"0","MessageRetentionPeriod":"36000","ContentBasedDeduplication":"false","ReceiveMessageWaitTimeSeconds":"0","ApproximateNumberOfMessagesNotVisible":"0"}} 2026-02-19T08:19:09.464484Z node 7 :SQS INFO: http_req.cpp:1237: Request [81ff4bf2-2183ad19-8e9b83d5-fa00079a] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [78cf:6ecd:ad7b:0:60cf:6ecd:ad7b:0] Duration [2ms] Code [200] 2026-02-19T08:19:09.464506Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [GetQueueAttributes] requestId [81ff4bf2-2183ad19-8e9b83d5-fa00079a] reply ok 2026-02-19T08:19:09.464773Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#38,[::1]:45662) <- (200 , 480 bytes) 2026-02-19T08:19:09.464851Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#38,[::1]:45662) connection closed 2026-02-19T08:19:09.510483Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:09.510522Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:09.510553Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:09.510574Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:09.510587Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:09.611944Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:09.611986Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:09.612002Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:09.612020Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:09.612035Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:18:52.628959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:18:52.752177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:18:52.770724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:18:52.770816Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:18:52.771500Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:18:53.049008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:53.049177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:53.152773Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489129212038 != 1771489129212042 2026-02-19T08:18:53.161220Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:53.211101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:53.312836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:18:53.656411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:53.670354Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:53.813260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:53.914284Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:18:53.915431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:18:53.915801Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:18:53.916108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:18:53.927611Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:18:53.969529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:18:53.969734Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:18:53.971747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:18:53.971843Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:18:53.971906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:18:53.972336Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:18:53.972500Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:18:53.972620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:18:53.986567Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:18:54.016960Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:18:54.017213Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:18:54.017350Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:18:54.017400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:18:54.017445Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:18:54.017482Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:18:54.017735Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:18:54.017788Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:18:54.018259Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:18:54.018405Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:18:54.018519Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:18:54.018561Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:18:54.018644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:18:54.018712Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:18:54.018753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:18:54.018787Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:18:54.018833Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:18:54.018927Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:18:54.018969Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:18:54.019024Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:18:54.019142Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:18:54.019198Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:18:54.019307Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:18:54.019656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:18:54.019723Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:18:54.019826Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:18:54.019891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:18:54.019931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:18:54.019970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:18:54.020061Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:18:54.020410Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:18:54.020448Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:18:54.020501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:18:54.020537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:18:54.020606Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:18:54.020651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:18:54.020697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:18:54.020737Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:18:54.020764Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:18:54.022842Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:18:54.022914Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:18:54.034401Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... pp:1883: Execution status for [4500:281474976715668] at 72075186224037889 is DelayComplete 2026-02-19T08:19:10.508994Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2026-02-19T08:19:10.509043Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2026-02-19T08:19:10.509097Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037889 on unit CompletedOperations 2026-02-19T08:19:10.509161Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037889 is Executed 2026-02-19T08:19:10.509190Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2026-02-19T08:19:10.509218Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976715668] at 72075186224037889 has finished 2026-02-19T08:19:10.509257Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:10.509285Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037889 2026-02-19T08:19:10.509313Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2026-02-19T08:19:10.509344Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037889 2026-02-19T08:19:10.521021Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:10.521103Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:10.521145Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976715668] at 72075186224037889 on unit CompleteOperation 2026-02-19T08:19:10.521223Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [4500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [3:1347:3117], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:10.521291Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:10.521507Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 4500} 2026-02-19T08:19:10.521564Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:19:10.521596Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:19:10.521943Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287940, Sender [3:1347:3117], Recipient [3:1173:2968]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 
2026-02-19T08:19:10.521999Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3237: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-02-19T08:19:10.522155Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [3:1173:2968], Recipient [3:1173:2968]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:19:10.522187Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:19:10.522250Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-02-19T08:19:10.522287Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:10.522326Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2026-02-19T08:19:10.522361Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2026-02-19T08:19:10.522399Z node 3 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [4500:281474976715668] at 72075186224037890 2026-02-19T08:19:10.522438Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-02-19T08:19:10.522471Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2026-02-19T08:19:10.522504Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2026-02-19T08:19:10.522536Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit ReadTableScan 2026-02-19T08:19:10.522806Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Continue 2026-02-19T08:19:10.522841Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:10.522871Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2026-02-19T08:19:10.522904Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:52: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2026-02-19T08:19:10.522937Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2026-02-19T08:19:10.523855Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435082, Sender [3:1366:3134], Recipient [3:1173:2968]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-02-19T08:19:10.523909Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3246: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-02-19T08:19:10.524129Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2026-02-19T08:19:10.524950Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, 
MessageQuota: 0 2026-02-19T08:19:10.526669Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2026-02-19T08:19:10.526731Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2026-02-19T08:19:10.600411Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037890 2026-02-19T08:19:10.600480Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715668, at: 72075186224037890 2026-02-19T08:19:10.600683Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [3:1173:2968], Recipient [3:1173:2968]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:19:10.600732Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:19:10.600806Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-02-19T08:19:10.600846Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:10.600887Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976715668] at 72075186224037890 for ReadTableScan 2026-02-19T08:19:10.600922Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit ReadTableScan 2026-02-19T08:19:10.600962Z node 3 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [4500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2026-02-19T08:19:10.601009Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-02-19T08:19:10.601045Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit ReadTableScan 2026-02-19T08:19:10.601093Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2026-02-19T08:19:10.601126Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit CompleteOperation 2026-02-19T08:19:10.601327Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is DelayComplete 2026-02-19T08:19:10.601365Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2026-02-19T08:19:10.601398Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2026-02-19T08:19:10.601430Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit CompletedOperations 2026-02-19T08:19:10.601469Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-02-19T08:19:10.601495Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit 
CompletedOperations 2026-02-19T08:19:10.601546Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976715668] at 72075186224037890 has finished 2026-02-19T08:19:10.601581Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:10.601615Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-02-19T08:19:10.601645Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-02-19T08:19:10.601676Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-02-19T08:19:10.612967Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:19:10.613044Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:19:10.613085Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976715668] at 72075186224037890 on unit CompleteOperation 2026-02-19T08:19:10.613149Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [4500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1347:3117], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:19:10.613203Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |91.7%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true [GOOD] >> VectorIndexBuildTest::TTxReply_DoExecute_Throws >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> TestSqsTopicHttpProxy::TestCreateQueueExistingTopicWithSharedConsumer [GOOD] >> TestSqsTopicHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] 
|91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TBackupCollectionTests::DisallowedPath >> TestSqsTopicHttpProxy::TestCreateQueueExistingTopicWithStreamingConsumer >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty [GOOD] >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> TestSqsTopicHttpProxy::TestCreateQueueWithBadQueueName >> YdbTableSplit::SplitByLoadWithReads >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:18:52.146298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:18:52.254834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:18:52.276096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:18:52.276203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:18:52.276810Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:18:52.581332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:52.581453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:52.684218Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489129320863 != 1771489129320867 2026-02-19T08:18:52.696947Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:52.746097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:52.856806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:18:53.176105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:53.192463Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:53.301290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:53.363825Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:18:53.364177Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:18:53.419145Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:18:53.419321Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:18:53.421557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:18:53.421679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:18:53.421779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:18:53.422403Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:18:53.422584Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:18:53.422687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:18:53.434414Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:18:53.478029Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:18:53.478303Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:18:53.478440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:18:53.478481Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:18:53.478521Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:18:53.478559Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:18:53.479108Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:18:53.479243Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:18:53.479338Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:18:53.479386Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:18:53.479443Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:18:53.479490Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:18:53.479557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:18:53.479721Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:18:53.480011Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:18:53.480132Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:18:53.483264Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:18:53.494832Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:18:53.494987Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:18:53.633146Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:18:53.638200Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:18:53.638308Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:18:53.638687Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:18:53.638745Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:18:53.638814Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:18:53.639108Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:18:53.639274Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:18:53.640101Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:18:53.640209Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:18:53.642487Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:18:53.642947Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:18:53.645532Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:18:53.645593Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:18:53.646106Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:18:53.646204Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:18:53.647882Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:18:53.647936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:18:53.647989Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:18:53.648080Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:18:53.648147Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:18:53.648257Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:18:53.655958Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:18:53.658169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:18:53.658269Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:18:53.659141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... Heartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.001755Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:13.002928Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:13.002986Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:13.003628Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:13.016613Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:13.016793Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-02-19T08:19:13.016851Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:236: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-02-19T08:19:13.016889Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-02-19T08:19:13.017422Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:13.031845Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:13.199078Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:19:13.199178Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.199550Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:13.199599Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:19:13.199649Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-02-19T08:19:13.199871Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-02-19T08:19:13.200008Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:13.200221Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 
72075186224037888 2026-02-19T08:19:13.201083Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:13.244750Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-02-19T08:19:13.244883Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:13.244931Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:13.244980Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.245066Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:13.245133Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-02-19T08:19:13.245227Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.247605Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-02-19T08:19:13.247707Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:13.258063Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1068:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:13.258174Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:13.258245Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:13.258945Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1083:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:13.259020Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:13.263985Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:13.273156Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:13.454909Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:13.458656Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1082:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:19:13.486623Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1140:2944] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:13.555264Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1170:2961], serverId# [4:1171:2962], sessionId# [0:0:0] 2026-02-19T08:19:13.555745Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:3] at 72075186224037888 2026-02-19T08:19:13.556028Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 1 Group: 1771489153555927 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:13.556214Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-02-19T08:19:13.567953Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-02-19T08:19:13.568057Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.632515Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:4] at 72075186224037888 2026-02-19T08:19:13.632744Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 2 Group: 1771489153632680 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:13.632851Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-02-19T08:19:13.644478Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-02-19T08:19:13.644574Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.741613Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:5] at 72075186224037888 2026-02-19T08:19:13.741993Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 3 Group: 1771489153741865 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:13.742151Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: 
Executed write operation for [0:5] at 72075186224037888, row count=1 2026-02-19T08:19:13.753249Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-02-19T08:19:13.753337Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.755383Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1215:2991], serverId# [4:1216:2992], sessionId# [0:0:0] 2026-02-19T08:19:13.762571Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1217:2993], serverId# [4:1218:2994], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: 2026-02-19T08:17:44.970535Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2026-02-19T08:17:45.083496Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:45.083623Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:45.083691Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:45.083769Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-02-19T08:17:45.109800Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:45.138854Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 1 actor [1:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-02-19T08:17:45.140204Z node 1 
:PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-02-19T08:17:45.143055Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-02-19T08:17:45.146789Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:191:2142] 2026-02-19T08:17:45.149083Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:191:2142] 2026-02-19T08:17:45.159647Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:45.160200Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|99170741-c59dd08-e2c94063-35685cb6_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:45.184082Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|987164a9-d7253caf-22067ab4-fffdc133_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:45.239977Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:45.240474Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1b099f59-f4f94ba8-5465306a-9e90ba79_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:45.258593Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:45.259058Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|420debbb-853f465e-1040f591-eead9241_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:45.270147Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:45.270626Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d8bd1b52-45655f2b-bff00314-2e8b5f30_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:45.310921Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:17:45.311356Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4bd642cd-e9d26528-87dcc328-504014fb_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:17:45.918995Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:107:2057] recipient: [2:105:2138] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:112:2057] recipient: [2:105:2138] 2026-02-19T08:17:45.973075Z node 2 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx 
info 2026-02-19T08:17:45.973147Z node 2 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:45.973240Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:45.973311Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:153:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927938 is [2:157:2176] sender: [2:158:2057] recipient: [2:151:2172] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:183:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:111:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:185:2057] recipient: [2:103:2137] Leader for TabletID 72057594037927937 is [2:111:2142] sender: [2:188:2057] recipient: [2:187:2197] Leader for TabletID 72057594037927937 is [2:189:2198] sender: [2:190:2057] recipient: [2:187:2197] 2026-02-19T08:17:46.052995Z node 2 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:17:46.053071Z node 2 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:17:46.053132Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:17:46.053198Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:111:2142]) rebooted! !Reboot 72057594037927937 (actor [2:111:2142]) tablet resolver refreshed! 
new actor is[2:189:2198] 2026-02-19T08:17:46.077269Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.145897Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.179615Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.193611Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.232684Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.294080Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.342681Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.477553Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.513981Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.838052Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:46.889251Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:47.253991Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:17:47.570023Z node 2 
:TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: ... allowFollowers: 0 2026-02-19T08:19:13.069281Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 47 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:19:13.317387Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 47 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:19:13.428644Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 47 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [47:289:2277] sender: [47:390:2057] recipient: [47:14:2061] 2026-02-19T08:19:14.026075Z node 48 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 48 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:107:2057] recipient: [48:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:107:2057] recipient: [48:105:2138] Leader for TabletID 72057594037927937 is [48:111:2142] sender: [48:112:2057] recipient: [48:105:2138] 2026-02-19T08:19:14.104935Z node 48 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:19:14.105001Z node 48 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:19:14.105053Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:19:14.105110Z node 48 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:153:2057] recipient: [48:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:153:2057] recipient: [48:151:2172] Leader for TabletID 72057594037927938 is [48:157:2176] sender: [48:158:2057] recipient: [48:151:2172] Leader for TabletID 72057594037927937 is [48:111:2142] sender: [48:183:2057] recipient: [48:14:2061] 2026-02-19T08:19:14.135561Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:19:14.136558Z node 48 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 48 actor [48:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 } 2026-02-19T08:19:14.137792Z node 48 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [48:189:2142] 2026-02-19T08:19:14.140994Z node 48 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:189:2142] 2026-02-19T08:19:14.143222Z node 48 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [48:190:2142] 2026-02-19T08:19:14.145617Z node 48 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:190:2142] 2026-02-19T08:19:14.159976Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.160541Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c2d8013e-ebf682a-19ffeb9b-fa70d3ee_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.167601Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5b05851e-507c29c6-116b1d79-46b5a640_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.205266Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.205874Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2b56ed41-3b25604-e80db3c7-333558b4_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.219982Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.220608Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|377ea563-5350a68d-afa0c575-7c3971df_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.231555Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.232119Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7dd13d56-d0130984-3108eb44-1e7bd4f8_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.242531Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.243106Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5974830d-47924823-8ca30eda-30aaf611_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.771369Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 49 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:107:2057] recipient: [49:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:107:2057] 
recipient: [49:105:2138] Leader for TabletID 72057594037927937 is [49:111:2142] sender: [49:112:2057] recipient: [49:105:2138] 2026-02-19T08:19:14.835150Z node 49 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:19:14.835217Z node 49 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:19:14.835267Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:19:14.835337Z node 49 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:153:2057] recipient: [49:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:153:2057] recipient: [49:151:2172] Leader for TabletID 72057594037927938 is [49:157:2176] sender: [49:158:2057] recipient: [49:151:2172] Leader for TabletID 72057594037927937 is [49:111:2142] sender: [49:183:2057] recipient: [49:14:2061] 2026-02-19T08:19:14.857219Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:19:14.858158Z node 49 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 49 actor [49:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 } 2026-02-19T08:19:14.859243Z node 49 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [49:189:2142] 2026-02-19T08:19:14.862396Z node 49 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:189:2142] 2026-02-19T08:19:14.864557Z node 49 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [49:190:2142] 2026-02-19T08:19:14.866958Z node 49 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:190:2142] 2026-02-19T08:19:14.879488Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.880041Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1346cc00-f4bc948-b2af1cb8-a378d22c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.886610Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8a750cd1-e192678f-1e756443-c9ce437_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.923674Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 
2026-02-19T08:19:14.924225Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|50bff40f-ceaf127-f9872a7d-ca68ed75_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.935941Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.936496Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|97920ba6-91b86413-6f51788b-4efb415f_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.946572Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.947079Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6a28d386-9a38836e-4d9b0f00-1bebf6dd_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:19:14.957869Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:19:14.958471Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|14a170-72ecfc8e-a492649e-eab2ed16_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TestSqsTopicHttpProxy::TestReceiveMessage >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> VectorIndexBuildTest::TTxReply_DoExecute_Throws [GOOD] >> VectorIndexBuildTest::TTxProgress_Throws >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::CreateAbsolutePath >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> IndexBuildTest::BaseCaseUniq [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> YdbTableSplit::SplitByLoadWithDeletes >> YdbTableSplit::RenameTablesAndSplit >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> YdbTableSplit::MergeByNoLoadAfterSplit >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::BaseCaseUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:29.910293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:29.910388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.910466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:29.910508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:29.910545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:29.910589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:29.910626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:29.910652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:29.910716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.910816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:29.911668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:29.911985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:30.003551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:30.003605Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:30.015902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:30.016758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:30.016956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:30.028692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:30.029345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:30.030148Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:30.030440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:30.034083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:30.034259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:30.035513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:30.035579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:30.035701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:30.035747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:30.035803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:30.035988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:18:30.210070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:18:30.211383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.211992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:30.212081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... true 2026-02-19T08:19:17.415746Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976735761, at schemeshard: 72075186233409555 2026-02-19T08:19:17.441115Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877763, Sender [6:5565:7291], Recipient [6:4222:5986]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409555 ClientId: [6:5565:7291] ServerId: [6:5567:7293] } 2026-02-19T08:19:17.441199Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3250: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:19:17.482297Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1900, transactions count in step: 1, at schemeshard: 72075186233409555 2026-02-19T08:19:17.482446Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735761 AckTo { RawX1: 0 RawX2: 0 } } Step: 1900 MediatorID: 72075186233409557 TabletID: 72075186233409555, at schemeshard: 72075186233409555 2026-02-19T08:19:17.482507Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72075186233409555] TDropLock TPropose opId# 281474976735761:0 HandleReply TEvOperationPlan: step# 1900 2026-02-19T08:19:17.482562Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976735761:0 Propose -> Done 2026-02-19T08:19:17.482731Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72075186233409555] TDropLock TPropose opId# 281474976735761:1 HandleReply TEvOperationPlan: step# 1900 2026-02-19T08:19:17.482773Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976735761:1 Propose -> Done 2026-02-19T08:19:17.491738Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976735761:0, at schemeshard: 72075186233409555 2026-02-19T08:19:17.491820Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72075186233409555] TDone opId# 281474976735761:0 ProgressState 2026-02-19T08:19:17.491931Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done 
id#281474976735761:0 progress is 1/2 2026-02-19T08:19:17.491967Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976735761 ready parts: 1/2 2026-02-19T08:19:17.492011Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976735761:0 progress is 1/2 2026-02-19T08:19:17.492046Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976735761 ready parts: 1/2 2026-02-19T08:19:17.492088Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976735761, ready parts: 1/2, is published: true 2026-02-19T08:19:17.492765Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976735761:1, at schemeshard: 72075186233409555 2026-02-19T08:19:17.492815Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72075186233409555] TDone opId# 281474976735761:1 ProgressState 2026-02-19T08:19:17.492901Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976735761:1 progress is 2/2 2026-02-19T08:19:17.492930Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976735761 ready parts: 2/2 2026-02-19T08:19:17.492970Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976735761:1 progress is 2/2 2026-02-19T08:19:17.492998Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976735761 ready parts: 2/2 2026-02-19T08:19:17.493032Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976735761, ready parts: 2/2, is published: true 2026-02-19T08:19:17.493104Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:4016:5810] message: TxId: 281474976735761 2026-02-19T08:19:17.493164Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976735761 ready parts: 2/2 2026-02-19T08:19:17.493206Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976735761:0 2026-02-19T08:19:17.493239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976735761:0 2026-02-19T08:19:17.493312Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409555, LocalPathId: 32] was 4 2026-02-19T08:19:17.493353Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976735761:1 2026-02-19T08:19:17.493376Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976735761:1 2026-02-19T08:19:17.493410Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409555, LocalPathId: 34] was 3 2026-02-19T08:19:17.502968Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976735761 2026-02-19T08:19:17.503063Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 
281474976735761 2026-02-19T08:19:17.503154Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 115, txId# 281474976735761 2026-02-19T08:19:17.503298Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409555, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409555, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:4782:6532], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1650, ApplyTxId: 281474976735760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976735761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 UploadBytes: 1818 ReadRows: 202 ReadBytes: 3030 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976735761 2026-02-19T08:19:17.506177Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 115 Unlocking 2026-02-19T08:19:17.506341Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 115 Unlocking TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409555, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409555, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:4782:6532], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1650, ApplyTxId: 281474976735760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976735761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 UploadBytes: 1818 ReadRows: 202 ReadBytes: 3030 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:19:17.506415Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:19:17.509223Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 115 Done 2026-02-19T08:19:17.509394Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 115 Done TBuildInfo{ IndexBuildId: 115, Uid: , 
DomainPathId: [OwnerId: 72075186233409555, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409555, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Done, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:4782:6532], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 1650, ApplyTxId: 281474976735760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976735761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 UploadBytes: 1818 ReadRows: 202 ReadBytes: 3030 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:19:17.509435Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2026-02-19T08:19:17.509580Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-02-19T08:19:17.509620Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [6:4878:6617] TestWaitNotification: OK eventTxId 115 2026-02-19T08:19:17.514674Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2026-02-19T08:19:17.514971Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { seconds: 1 } EndTime { seconds: 1 } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { seconds: 1 } EndTime { seconds: 1 } } >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice >> VectorIndexBuildTest::TTxProgress_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Throws >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> CompressExecutor::TestExecutorMemUsage [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:18:58.562457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:18:58.668754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:18:58.690391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:18:58.690484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:18:58.691131Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:18:59.031477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:59.031616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:59.118738Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489135370777 != 1771489135370781 2026-02-19T08:18:59.128361Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:59.179604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:59.296506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:18:59.654443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:59.672700Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:59.791302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:59.871734Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2778] 2026-02-19T08:18:59.872041Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:18:59.942941Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:18:59.943229Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:18:59.945420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:18:59.945535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:18:59.945602Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:18:59.946123Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:18:59.946463Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:907:2781] 2026-02-19T08:18:59.946734Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:18:59.961362Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:18:59.961657Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2778] in generation 1 
2026-02-19T08:18:59.962751Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:18:59.962864Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:18:59.964510Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:18:59.964590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:18:59.964639Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:18:59.965002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:18:59.965140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:18:59.965224Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:935:2781] in generation 1 2026-02-19T08:18:59.976252Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:00.010104Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:19:00.010400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:00.010620Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:938:2799] 2026-02-19T08:19:00.010668Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:00.010710Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:00.010746Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:00.011145Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:00.011192Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:19:00.011257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:00.011336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:939:2800] 2026-02-19T08:19:00.011388Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:19:00.011438Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:19:00.011492Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:00.011999Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:00.012107Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:00.012213Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:00.012265Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:00.012330Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:00.012388Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:00.012443Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:19:00.012537Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:19:00.012626Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:896:2774], serverId# [1:904:2779], sessionId# [0:0:0] 2026-02-19T08:19:00.012675Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:19:00.012701Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:00.012727Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:19:00.012773Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:00.012930Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:00.013225Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:00.013356Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:19:00.013970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [1:897:2775], serverId# [1:909:2782], sessionId# [0:0:0] 2026-02-19T08:19:00.014399Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:19:00.014602Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-02-19T08:19:00.014677Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-02-19T08:19:00.016717Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:00.016807Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:00.029606Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:00.029765Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:00.032477Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: 
TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:19:00.032607Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:00.160649Z node 1 :TX_DATASHARD ... tashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:19.130720Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:19.130778Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:19:19.130841Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037889 2026-02-19T08:19:19.130901Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:19.130958Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:19.131037Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:19.133341Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:19.133384Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:19.133426Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:19:19.133475Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:19.133516Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:19.133574Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:19.133650Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2026-02-19T08:19:19.133678Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:19:19.135236Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:19.135310Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:19.135355Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-02-19T08:19:19.135993Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2000} 2026-02-19T08:19:19.136040Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037890 2026-02-19T08:19:19.136670Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:19:19.136710Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037890 2026-02-19T08:19:19.136743Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037890 2026-02-19T08:19:19.136789Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:19.136832Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:19.136889Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:19:19.141649Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:19.141922Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-02-19T08:19:19.141990Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-02-19T08:19:19.142530Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:19.142804Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:19.143112Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:19.143165Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:19.143662Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2026-02-19T08:19:19.143703Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-02-19T08:19:19.156163Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1051:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:19.156276Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1061:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:19.156348Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:19.156960Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1065:2896], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:19.157023Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:19.161969Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:19.168764Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:19.168871Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:19.168921Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-02-19T08:19:19.303199Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:19.303327Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:19.303379Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-02-19T08:19:19.307858Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1066:2897], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:19:19.342811Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1125:2937] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:19.425558Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1230:2981], serverId# [4:1231:2982], sessionId# [0:0:0] 2026-02-19T08:19:19.425969Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037889 2026-02-19T08:19:19.426243Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 1 Group: 1771489159426149 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:19.426432Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 2 Group: 1771489159426149 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:19.426560Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-02-19T08:19:19.438979Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-02-19T08:19:19.439063Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:19.445384Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1237:2987], serverId# [4:1238:2988], sessionId# [0:0:0] 2026-02-19T08:19:19.451396Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1239:2989], serverId# [4:1240:2990], sessionId# [0:0:0] >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::Drop >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> TestSqsTopicHttpProxy::TestCreateQueueExistingTopicWithStreamingConsumer [GOOD] >> YdbTableSplit::SplitByLoadWithUpdates >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit >> TestSqsTopicHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> 
TestSqsTopicHttpProxy::TestCreateQueueExistingTopicNoConsumer >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> VectorIndexBuildTest::TTxInit_Throws [GOOD] >> VectorIndexBuildTest::UnknownState >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> TestSqsTopicHttpProxy::TestCreateQueueWithEmptyName >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> THeavyPerfTest::TTestLoadEverything [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:19:02.303026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:02.423570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:02.445497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:02.445589Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:02.446172Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:02.898777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:02.898881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:02.994863Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489138664067 != 1771489138664071 2026-02-19T08:19:03.004194Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:03.052036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:03.153068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:03.490626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:03.509139Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:03.635733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:03.687812Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2778] 2026-02-19T08:19:03.688093Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:03.744008Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:03.744275Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:03.746196Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:19:03.746291Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:19:03.746349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:19:03.746731Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:03.747067Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:907:2781] 2026-02-19T08:19:03.747286Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:03.755387Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:03.755523Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2778] in generation 1 
2026-02-19T08:19:03.756454Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:03.756553Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:03.757887Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:19:03.757961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:19:03.758006Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:19:03.758294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:03.758398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:03.758469Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:935:2781] in generation 1 2026-02-19T08:19:03.770446Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:03.809772Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:19:03.810030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:03.810177Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:938:2799] 2026-02-19T08:19:03.810219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:03.810256Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:03.810290Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:03.810674Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:03.810716Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:19:03.810771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:03.810829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:939:2800] 2026-02-19T08:19:03.810877Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:19:03.810914Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:19:03.810954Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:03.811389Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:03.811515Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:03.811612Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:03.811660Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:03.811713Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:03.811752Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:03.811798Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:19:03.811893Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:19:03.811987Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:896:2774], serverId# [1:904:2779], sessionId# [0:0:0] 2026-02-19T08:19:03.812032Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:19:03.812055Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:03.812078Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:19:03.812105Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:03.812245Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:03.812501Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:03.812642Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:19:03.813125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [1:897:2775], serverId# [1:909:2782], sessionId# [0:0:0] 2026-02-19T08:19:03.813530Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:19:03.813740Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-02-19T08:19:03.813818Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-02-19T08:19:03.815761Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:03.815864Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:03.826771Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:03.826907Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:03.827606Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: 
TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:19:03.827681Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:03.963552Z node 1 :TX_DATASHARD ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:19:22.473206Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:22.479942Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:22.480013Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:22.480073Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:19:22.480145Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:22.480211Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:22.480304Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:22.482917Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:22.484801Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:22.484886Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:22.485639Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:22.498209Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:22.498360Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-02-19T08:19:22.498414Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:236: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-02-19T08:19:22.498446Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-02-19T08:19:22.498939Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:22.513244Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:22.714182Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 
72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:19:22.714259Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:22.714608Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:22.714660Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:19:22.714709Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-02-19T08:19:22.714907Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-02-19T08:19:22.715041Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:22.715268Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:22.716164Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:22.770361Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-02-19T08:19:22.770481Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:22.770539Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:22.770586Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:22.770671Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:22.770737Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-02-19T08:19:22.770831Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:22.772904Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-02-19T08:19:22.772998Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:22.791758Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1068:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:22.791883Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:22.791965Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:22.792668Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1083:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:22.792773Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:22.806249Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:22.821981Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:22.993901Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:22.996762Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1082:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:19:23.028065Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1140:2944] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:23.110235Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1170:2961], serverId# [4:1171:2962], sessionId# [0:0:0] 2026-02-19T08:19:23.110710Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:3] at 72075186224037888 2026-02-19T08:19:23.111038Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 1 Group: 1771489163110919 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:23.111279Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-02-19T08:19:23.122386Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-02-19T08:19:23.122509Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:23.207371Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:4] at 72075186224037888 2026-02-19T08:19:23.207718Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 2 Group: 1771489163207582 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:23.207898Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-02-19T08:19:23.222466Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-02-19T08:19:23.222546Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:23.224935Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1197:2980], serverId# [4:1198:2981], sessionId# [0:0:0] 2026-02-19T08:19:23.238226Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1199:2982], serverId# [4:1200:2983], sessionId# [0:0:0] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD] 
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 >> CdcStreamChangeCollector::NewImage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: 2026-02-19T08:15:17.188890Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486967096001000:2209];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:17.189075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:17.426309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:17.473433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:17.473554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:17.542181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:17.544031Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:17.667670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:17.694371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:17.694397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:17.694412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:17.694513Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:18.191754Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:18.428871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:18.537249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:18.869005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:19.131303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:19.224819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:21.874297Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486984275871855:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:21.874425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:21.874787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486984275871865:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:21.874830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.190479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486967096001000:2209];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:22.190548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:15:22.241616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.302418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.350742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.418568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.495762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.548278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.649912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.716824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.839548Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486988570840036:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.839620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.840307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486988570840041:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.840360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486988570840042:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.840445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.845771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:22.863103Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486988570840045:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:15:22.943037Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486988570840098:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:24.817684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:32.389933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:15:32.389967Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded finished ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:04.889465Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:04.981621Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:04.985177Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608487427351889751:2081] 1771489024871524 != 1771489024871527 2026-02-19T08:17:05.021301Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:05.021423Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:05.083301Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:05.084423Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:05.128737Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:05.128773Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:05.128783Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:05.128886Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:05.585226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:05.594368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:05.659494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:05.824002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:05.887312Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:05.894330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:08.844952Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487444531760810:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:08.845044Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:08.845296Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487444531760819:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:08.845344Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:08.910063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:08.961422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:08.996058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:09.032581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:09.066667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:09.104328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:09.142542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:09.201160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:09.298396Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487448826728988:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:09.298503Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:09.298533Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487448826728993:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:09.298870Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487448826728995:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:09.298909Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:09.303506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:09.317653Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487448826728996:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:17:09.381415Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487448826729048:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:09.873249Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608487427351889782:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:09.873331Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:11.404277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:19.967412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:17:19.967448Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:19:04.360677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:04.527080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:04.568412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:04.568495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:04.569072Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:04.885477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:04.885604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:04.983526Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489141452505 != 1771489141452509 2026-02-19T08:19:04.993169Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:05.042044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:05.156362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:05.557944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:05.572186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:05.702894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:05.799697Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:19:05.799974Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:05.914356Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:05.914545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:05.916304Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:19:05.916394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:19:05.916477Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:19:05.916862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:05.916999Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:05.917083Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:19:05.930295Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:06.031902Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:19:06.032147Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:06.032267Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:19:06.032307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:06.032343Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:06.032387Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:06.032865Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:06.032984Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:06.033109Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:06.033152Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:06.033221Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:06.033274Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:06.033352Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:19:06.033506Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:06.041907Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:06.042089Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:19:06.044366Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:06.058323Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:06.058450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:06.196085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:19:06.202954Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:19:06.203063Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:19:06.203400Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:06.203455Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:19:06.203511Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:19:06.203819Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:19:06.203991Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:06.204736Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:06.204827Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:19:06.215161Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:19:06.215679Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:06.218368Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:19:06.218425Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:06.218840Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:19:06.218935Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:06.220629Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:06.220683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:06.220725Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:19:06.220808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:06.220871Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:06.220963Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:06.225779Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:06.227894Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:06.227975Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:06.228754Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:19:24.760887Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:24.762648Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:24.762707Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:24.762769Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:19:24.762853Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:24.762917Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:24.763023Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:24.764321Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:24.766644Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:24.766729Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:24.767642Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:24.780863Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:24.781055Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-02-19T08:19:24.781116Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:236: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-02-19T08:19:24.781154Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-02-19T08:19:24.781647Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:24.797659Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:24.985306Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 
TabletID: 72075186224037888 } 2026-02-19T08:19:24.985388Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:24.985800Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:24.985862Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:19:24.985915Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-02-19T08:19:24.986121Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-02-19T08:19:24.986262Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:24.986489Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:24.987641Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:25.036191Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-02-19T08:19:25.036334Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:25.036389Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:25.036442Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:25.036533Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:25.036604Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-02-19T08:19:25.036711Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:25.038925Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-02-19T08:19:25.039019Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:25.050777Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1068:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:25.050892Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:25.050967Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:25.051660Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1083:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:25.051726Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:25.063995Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:25.072113Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:25.249351Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:25.252569Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1082:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:19:25.286209Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1140:2944] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:25.397581Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1170:2961], serverId# [4:1171:2962], sessionId# [0:0:0] 2026-02-19T08:19:25.406750Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:3] at 72075186224037888 2026-02-19T08:19:25.407150Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 1 Group: 1771489165407014 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:25.407411Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-02-19T08:19:25.419970Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-02-19T08:19:25.420092Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:25.552498Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:4] at 72075186224037888 2026-02-19T08:19:25.552795Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 2 Group: 1771489165552689 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:25.552977Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-02-19T08:19:25.564106Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-02-19T08:19:25.564187Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:25.566234Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1197:2980], serverId# [4:1198:2981], sessionId# [0:0:0] 2026-02-19T08:19:25.575980Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1199:2982], serverId# [4:1200:2983], sessionId# [0:0:0] >> VectorIndexBuildTest::UnknownState [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> 
TBackupCollectionTests::BackupNonIncrementalCollection >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> CdcStreamChangeCollector::OldImage [GOOD] >> CdcStreamChangeCollector::SchemaChanges >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:19:07.699442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:07.812227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:07.830898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:07.830985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:07.831572Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:08.147315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:08.147434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:08.249595Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489144416635 != 1771489144416639 2026-02-19T08:19:08.260500Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:08.311851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:08.422326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:08.758373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:08.783240Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:08.906220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:08.992066Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2778] 2026-02-19T08:19:08.992447Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:09.092163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:09.092438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:09.094341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:19:09.094457Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:19:09.094523Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:19:09.094916Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:09.095329Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:907:2781] 2026-02-19T08:19:09.095628Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:09.104275Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:09.104453Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2778] in generation 1 
2026-02-19T08:19:09.105480Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:09.105580Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:09.107145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:19:09.107230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:19:09.107286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:19:09.107592Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:09.107724Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:09.107787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:935:2781] in generation 1 2026-02-19T08:19:09.119186Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:09.160256Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:19:09.160507Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:09.160664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:938:2799] 2026-02-19T08:19:09.160722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:09.160772Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:09.160812Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:09.161239Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:09.161298Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:19:09.161365Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:09.161425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:939:2800] 2026-02-19T08:19:09.161483Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:19:09.161514Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:19:09.161562Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:09.162157Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:09.162281Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:09.162409Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:09.162471Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:09.162533Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:09.162579Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:09.162635Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:19:09.162740Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:19:09.162865Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:896:2774], serverId# [1:904:2779], sessionId# [0:0:0] 2026-02-19T08:19:09.162930Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:19:09.162958Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:09.162985Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:19:09.163017Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:09.163213Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:09.163492Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:09.163607Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:19:09.164124Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [1:897:2775], serverId# [1:909:2782], sessionId# [0:0:0] 2026-02-19T08:19:09.164581Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:19:09.164806Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-02-19T08:19:09.164889Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-02-19T08:19:09.166966Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:09.167058Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:09.178622Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:09.178760Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:09.179380Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: 
TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:19:09.179511Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:09.301884Z node 1 :TX_DATASHARD ... ansaction::Execute at 72075186224037889 2026-02-19T08:19:26.672616Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:19:26.672648Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037889 2026-02-19T08:19:26.672857Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:19:26.672953Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:26.673186Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:26.673237Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 40] schema version# 1 2026-02-19T08:19:26.673663Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:19:26.676436Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:26.677654Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:19:26.677754Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:19:26.678170Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:19:26.678483Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:26.679557Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:19:26.679608Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:26.682949Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:19:26.683069Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:26.683140Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2026-02-19T08:19:26.683192Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:26.684552Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2026-02-19T08:19:26.684615Z node 
4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:26.686655Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:26.686713Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:26.686767Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:19:26.686859Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:26.686920Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:26.687046Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:26.691344Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:26.691426Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:19:26.691476Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037889 2026-02-19T08:19:26.691554Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:26.691603Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:26.691675Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:26.692330Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:26.692456Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:26.704411Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:26.704525Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:26.709660Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:26.709779Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:26.714720Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-02-19T08:19:26.714795Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 
72075186224037889 2026-02-19T08:19:26.724862Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1004:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:26.724990Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1015:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:26.725088Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:26.725834Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1019:2860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:26.725908Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:26.732500Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:26.740486Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:26.740665Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:26.956941Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:26.957057Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:26.960143Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1018:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:19:26.996677Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1077:2899] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:27.094673Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1145:2930], serverId# [4:1146:2931], sessionId# [0:0:0] 2026-02-19T08:19:27.095102Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037889 2026-02-19T08:19:27.095413Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 1 Group: 1771489167095312 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:27.095613Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-02-19T08:19:27.109089Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-02-19T08:19:27.109191Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:27.114511Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1152:2936], serverId# [4:1153:2937], sessionId# [0:0:0] 2026-02-19T08:19:27.120769Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1154:2938], serverId# [4:1155:2939], sessionId# [0:0:0] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::UnknownState [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:29.650801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:29.650893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.650968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:29.651049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:29.651098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:29.651138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:29.651176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:29.651214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:29.651283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.651367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:29.652292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:29.652644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:29.757855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:29.757913Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:29.771792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:29.772542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:29.772773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:29.784226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:29.784907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:29.785606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:29.785927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:29.789335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:29.789497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:29.790598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:29.790660Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:29.790772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:29.790817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:29.790855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:29.791244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:18:29.951699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.952718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.952849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.952919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.952991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } 
} TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.953816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... Id: 18] was 0 2026-02-19T08:19:27.935402Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-02-19T08:19:27.935428Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-02-19T08:19:27.935450Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-02-19T08:19:27.935475Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-02-19T08:19:27.935497Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-02-19T08:19:27.935518Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-02-19T08:19:27.935541Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-02-19T08:19:27.935565Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-02-19T08:19:27.935588Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-02-19T08:19:27.935621Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-02-19T08:19:27.935645Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-02-19T08:19:27.935670Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-02-19T08:19:27.935696Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-02-19T08:19:27.935720Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-02-19T08:19:27.935742Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:19:27.935764Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-02-19T08:19:27.935786Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:19:27.935808Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:19:27.935831Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:19:27.936107Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 10, at schemeshard: 72057594046678944 2026-02-19T08:19:27.936343Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.936497Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-02-19T08:19:27.936826Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:19:27.936882Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:19:27.936906Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-02-19T08:19:27.936929Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-02-19T08:19:27.937093Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 4, at schemeshard: 72057594046678944 2026-02-19T08:19:27.937381Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.937636Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 12, at schemeshard: 72057594046678944 2026-02-19T08:19:27.938098Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:27.938163Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:19:27.938340Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:27.938833Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.938934Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.939277Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.939456Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.939545Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.939684Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read 
records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.939908Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.940007Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.940543Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.941119Z node 6 :BUILD_INDEX DEBUG: schemeshard_index_build_info.h:626: Restored index build id# 102: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, State: Filling, SubState: None, IsBroken: 1, IsCancellationRequested: 0, Issue: Unknown build kind: 999999, SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:19:27.941239Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:27.941359Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.941438Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:19:27.942007Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 1 tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:27.942125Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:27.942220Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:27.948554Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:19:27.954878Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:19:27.954992Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:27.955093Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:19:27.955165Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-02-19T08:19:27.955252Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:19:27.956912Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [6:1277:3173] sender: [6:1340:2058] recipient: [6:15:2062] 2026-02-19T08:19:28.023532Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-02-19T08:19:28.023781Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } 2026-02-19T08:19:28.024839Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=102&Page=BuildIndexInfo 2026-02-19T08:19:28.024936Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=102&Page=BuildIndexInfo >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> KqpLimits::TooBigQuery+useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:19:09.244998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:09.364517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:09.388924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:09.389013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:09.389594Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:09.717932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:09.718079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:09.814196Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489146050667 != 1771489146050671 2026-02-19T08:19:09.824787Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:09.874982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:09.986588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:10.330245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:10.346065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:10.465533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:10.517776Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2778] 2026-02-19T08:19:10.518078Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:10.572605Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:10.573019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:10.575163Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:19:10.575282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:19:10.575358Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:19:10.575792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:10.576233Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:907:2781] 2026-02-19T08:19:10.576553Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:10.585700Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:10.585890Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2778] in generation 1 
2026-02-19T08:19:10.586969Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:10.587083Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:10.588624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:19:10.588711Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:19:10.588774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:19:10.589090Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:10.589218Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:10.589311Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:935:2781] in generation 1 2026-02-19T08:19:10.600255Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:10.636583Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:19:10.636888Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:10.637062Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:938:2799] 2026-02-19T08:19:10.637115Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:10.637156Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:10.637192Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:10.637575Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:10.637617Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:19:10.637676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:10.637761Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:939:2800] 2026-02-19T08:19:10.637825Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:19:10.637857Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:19:10.637899Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:10.638367Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:10.638475Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:10.638577Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:10.638625Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:10.638686Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:10.638743Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:10.638789Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:19:10.638875Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:19:10.638990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:896:2774], serverId# [1:904:2779], sessionId# [0:0:0] 2026-02-19T08:19:10.639040Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:19:10.639066Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:10.639089Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:19:10.639136Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:10.639286Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:10.639559Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:10.639694Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:19:10.640205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [1:897:2775], serverId# [1:909:2782], sessionId# [0:0:0] 2026-02-19T08:19:10.640801Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:19:10.641050Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-02-19T08:19:10.641124Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-02-19T08:19:10.643471Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:10.643578Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:10.655608Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:10.655756Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:10.656485Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: 
TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:19:10.656596Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:10.788198Z node 1 :TX_DATASHARD ... X_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-02-19T08:19:29.226769Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2000} 2026-02-19T08:19:29.226829Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:19:29.227595Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:19:29.227637Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037890 2026-02-19T08:19:29.227673Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037890 2026-02-19T08:19:29.227728Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:29.227770Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:29.227851Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:19:29.233464Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:29.233703Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-02-19T08:19:29.236219Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-02-19T08:19:29.237076Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:29.237472Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:29.237885Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:29.237942Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:29.238595Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2026-02-19T08:19:29.238653Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-02-19T08:19:29.251103Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1051:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.251232Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1061:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.251307Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.252238Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1065:2896], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.252319Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.257204Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:29.267068Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:29.267218Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:29.267274Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-02-19T08:19:29.397534Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:29.397639Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:29.397687Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-02-19T08:19:29.400671Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1066:2897], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:19:29.437983Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1125:2937] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:29.559164Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1230:2981], serverId# [4:1231:2982], sessionId# [0:0:0] 2026-02-19T08:19:29.559537Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037889 2026-02-19T08:19:29.559806Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 1 Group: 1771489169559707 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:29.559983Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 2 Group: 1771489169559707 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:29.560085Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-02-19T08:19:29.571190Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-02-19T08:19:29.571283Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:29.648303Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:3] at 72075186224037889 2026-02-19T08:19:29.648643Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 3 Group: 1771489169648519 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:29.648848Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 4 Group: 1771489169648519 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:29.648961Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 5 Group: 1771489169648519 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified 
Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:29.649089Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 6 Group: 1771489169648519 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:29.649187Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:3] at 72075186224037889, row count=1 2026-02-19T08:19:29.662330Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-02-19T08:19:29.662411Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:29.666726Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1278:3021], serverId# [4:1279:3022], sessionId# [0:0:0] 2026-02-19T08:19:29.672691Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1280:3023], serverId# [4:1281:3024], sessionId# [0:0:0] >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithFullBackup >> TestSqsTopicHttpProxy::TestCreateQueueExistingTopicNoConsumer [GOOD] >> TestSqsTopicHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true [GOOD] >> VectorIndexBuildTest::Shard_Build_Error >> KqpExplain::UpdateOn-UseSink >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:19:10.839997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:10.965398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:10.985415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:10.985501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:10.986082Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:11.296066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:11.296190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:11.390697Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489147859810 != 1771489147859814 2026-02-19T08:19:11.401379Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:11.452516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:11.574102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:11.921630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:11.938284Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:12.051017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:12.109583Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2778] 2026-02-19T08:19:12.118037Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:12.178417Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:12.178655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:12.180336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:19:12.180445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:19:12.180506Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:19:12.180909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:12.181240Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:907:2781] 2026-02-19T08:19:12.181485Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:12.196159Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:12.196293Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2778] in generation 1 
2026-02-19T08:19:12.199388Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:12.199513Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:12.200881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:19:12.200960Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:19:12.201006Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:19:12.201316Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:12.201455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:12.201531Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:935:2781] in generation 1 2026-02-19T08:19:12.213149Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:12.252439Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:19:12.252842Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:12.252981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:938:2799] 2026-02-19T08:19:12.253027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:12.253061Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:12.253095Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:12.253422Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:12.253458Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:19:12.253537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:12.253586Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:939:2800] 2026-02-19T08:19:12.253624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:19:12.253660Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:19:12.253697Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:12.254108Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:12.254195Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:12.254277Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:12.254321Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:12.254380Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:12.254421Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:12.254463Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:19:12.254540Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:19:12.254642Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:896:2774], serverId# [1:904:2779], sessionId# [0:0:0] 2026-02-19T08:19:12.254685Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:19:12.254706Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:12.254726Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:19:12.254749Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:12.254870Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:12.255110Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:12.255233Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:19:12.255703Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [1:897:2775], serverId# [1:909:2782], sessionId# [0:0:0] 2026-02-19T08:19:12.256106Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:19:12.256278Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-02-19T08:19:12.256330Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-02-19T08:19:12.262730Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:12.262907Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:12.273740Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:12.273864Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:12.274533Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: 
TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:19:12.274612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:12.395340Z node 1 :TX_DATASHARD ... TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:19:30.258433Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:19:30.258881Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:30.260455Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:19:30.260517Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:30.262419Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:19:30.262531Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:30.262611Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2026-02-19T08:19:30.262648Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:30.264082Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2026-02-19T08:19:30.264151Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:30.266283Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:30.266358Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:30.266417Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:19:30.266534Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:30.266597Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:30.266692Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:30.269048Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:19:30.269124Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:19:30.269169Z node 4 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037889 2026-02-19T08:19:30.269235Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:404:2403], 
exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:30.269291Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:30.269365Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:30.270022Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:30.270158Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:30.274471Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:30.274582Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:30.275446Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:30.275531Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:19:30.276041Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-02-19T08:19:30.276097Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-02-19T08:19:30.288726Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1004:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.288849Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1015:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.288943Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.289674Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1019:2860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.290015Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.295740Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:30.317182Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:30.317348Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:30.459032Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:30.459173Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-02-19T08:19:30.462604Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1018:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:19:30.501216Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1077:2899] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:30.589392Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1145:2930], serverId# [4:1146:2931], sessionId# [0:0:0] 2026-02-19T08:19:30.590260Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037889 2026-02-19T08:19:30.590614Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 1 Group: 1771489170590474 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:30.590847Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-02-19T08:19:30.604639Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-02-19T08:19:30.604765Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:30.707166Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:3] at 72075186224037889 2026-02-19T08:19:30.707520Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 2 Group: 1771489170707391 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:30.707717Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 3 Group: 1771489170707391 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-02-19T08:19:30.707824Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:3] at 72075186224037889, row count=1 2026-02-19T08:19:30.718997Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-02-19T08:19:30.719078Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:19:30.723969Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: 
Server connected at leader tablet# 72075186224037889, clientId# [4:1185:2962], serverId# [4:1186:2963], sessionId# [0:0:0] 2026-02-19T08:19:30.732872Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [4:1187:2964], serverId# [4:1188:2965], sessionId# [0:0:0] >> TestSqsTopicHttpProxy::TestCreateQueueExistingTopicNoConsumerWithExtendedRetention >> TestSqsTopicHttpProxy::TestCreateQueueWithAttributes >> KqpParams::DefaultParameterValue >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> TBackupCollectionTests::DropCollectionWithFullBackup [GOOD] >> TBackupCollectionTests::DropCollectionWithIncrementalBackup |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 65072, msgbus: 17609 2026-02-19T08:14:09.310063Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486675926736748:2218];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:09.310149Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:09.558341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:09.621479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:09.636876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:09.756213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:09.854526Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:09.861798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:10.102370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:10.102402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:10.102416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:10.102526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:10.310304Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486675926736808:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:10.310370Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486680221704862:2713] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:10.311443Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486680221704862:2713] HANDLE 
EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:10.315514Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:10.350894Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486680221704862:2713] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:14:10.357263Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486680221704862:2713] Handle TEvDescribeSchemeResult Forward to# [1:7608486680221704861:2712] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:10.359728Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486675926736808:2143] Handle TEvProposeTransaction 2026-02-19T08:14:10.359754Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486675926736808:2143] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:14:10.359817Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486675926736808:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7608486680221704876:2720] 2026-02-19T08:14:10.460116Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486680221704876:2720] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:10.460209Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486680221704876:2720] txid# 281474976715657 
Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:10.460231Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486680221704876:2720] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:10.460287Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486680221704876:2720] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:10.460699Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486680221704876:2720] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:10.460827Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486680221704876:2720] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:14:10.460930Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486680221704876:2720] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:14:10.461081Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486680221704876:2720] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:14:10.461842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:10.468085Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486680221704876:2720] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:14:10.468143Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486680221704876:2720] txid# 281474976715657 SEND to# [1:7608486680221704875:2719] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:14:10.484961Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486675926736808:2143] Handle TEvProposeTransaction 2026-02-19T08:14:10.484986Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486675926736808:2143] TxId# 281474976715658 ProcessProposeTransaction 2026-02-19T08:14:10.485034Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486675926736808:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7608486680221704912:2753] 2026-02-19T08:14:10.487105Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486680221704912:2753] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:10.487158Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486680221704912:2753] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:10.487179Z 
node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486680221704912:2753] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:10.487224Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486680221704912:2753] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:10.487509Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486680221704912:2753] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:10.487612Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486680221704912:2753] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:14:10.487678Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486680221704912:2753] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-02-19T08:14:10.487805Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486680221704912:2753] txid# 281474976715658 HANDLE EvClientConnected 2026-02-19T08:14:10.488348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:14:10.493408Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486680221704912:2753] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-02-19T08:14:10.493450Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486680221704912:2753] txid# 281474976715658 SEND to# [1:7608486680221704911:2752] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-02-19T08:14:10.545548Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486675926736808:2143] Handle TEvProposeTransaction 2026-02-19T08:14:10.545574Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:23 ... 
e 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608487981802909372:2141] TxId# 281474976710664 ProcessProposeKqpTransaction 2026-02-19T08:19:22.333669Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608487981802909372:2141] Handle TEvProposeTransaction 2026-02-19T08:19:22.333698Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608487981802909372:2141] TxId# 281474976710665 ProcessProposeTransaction 2026-02-19T08:19:22.333763Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608487981802909372:2141] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [59:7608488016162649094:3010] 2026-02-19T08:19:22.335857Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488016162649094:3010] txid# 281474976710665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58812" 2026-02-19T08:19:22.335920Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488016162649094:3010] txid# 281474976710665 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:22.335940Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488016162649094:3010] txid# 281474976710665 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-02-19T08:19:22.336075Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488016162649094:3010] txid# 281474976710665 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:22.336116Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488016162649094:3010] txid# 281474976710665 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:22.336161Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488016162649094:3010] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:22.336422Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488016162649094:3010] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:22.336446Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488016162649094:3010] txid# 281474976710665, Access denied for ordinaryuser@builtin, attempt to manage user 2026-02-19T08:19:22.336533Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488016162649094:3010] txid# 281474976710665, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-02-19T08:19:22.336559Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488016162649094:3010] txid# 281474976710665 SEND to# [59:7608488016162649093:2351] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:22.337463Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=NmYwNjUzOTktZjJjNjAzZDItNGI0NzJkN2ItNTkzNjI5OWU=, ActorId: [59:7608488007572714452:2351], ActorState: ExecuteState, LegacyTraceId: 01khtfpx2afdymrbv34v7q7z0f, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 
severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:22.337733Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608487981802909372:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:22.337756Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608487981802909372:2141] TxId# 281474976710666 ProcessProposeKqpTransaction 2026-02-19T08:19:24.430198Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608487981802909372:2141] Handle TEvProposeTransaction 2026-02-19T08:19:24.430228Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608487981802909372:2141] TxId# 281474976710667 ProcessProposeTransaction 2026-02-19T08:19:24.430275Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608487981802909372:2141] Cookie# 0 userReqId# "" txid# 281474976710667 SEND to# [59:7608488024752583725:3026] 2026-02-19T08:19:24.432266Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488024752583725:3026] txid# 281474976710667 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:50828" 2026-02-19T08:19:24.432339Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488024752583725:3026] txid# 281474976710667 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:24.432359Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488024752583725:3026] txid# 281474976710667 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-02-19T08:19:24.432508Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488024752583725:3026] txid# 281474976710667 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:24.432550Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488024752583725:3026] txid# 281474976710667 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:24.432593Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488024752583725:3026] txid# 281474976710667 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:24.432866Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488024752583725:3026] txid# 281474976710667 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:24.432891Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488024752583725:3026] txid# 281474976710667, Access denied for ordinaryuser@builtin, attempt to manage user 2026-02-19T08:19:24.432974Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488024752583725:3026] txid# 281474976710667, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-02-19T08:19:24.433001Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488024752583725:3026] txid# 281474976710667 SEND to# [59:7608488024752583724:2351] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:24.433824Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: 
ydb://session/3?node_id=59&id=NmYwNjUzOTktZjJjNjAzZDItNGI0NzJkN2ItNTkzNjI5OWU=, ActorId: [59:7608488007572714452:2351], ActorState: ExecuteState, LegacyTraceId: 01khtfpz3z0qbkj1gjxm6bw61k, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:24.434232Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608487981802909372:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:24.434255Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608487981802909372:2141] TxId# 281474976710668 ProcessProposeKqpTransaction 2026-02-19T08:19:29.934423Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:19:29.934454Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:30.017291Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608487981802909372:2141] Handle TEvProposeTransaction 2026-02-19T08:19:30.017330Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608487981802909372:2141] TxId# 281474976710669 ProcessProposeTransaction 2026-02-19T08:19:30.017390Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608487981802909372:2141] Cookie# 0 userReqId# "" txid# 281474976710669 SEND to# [59:7608488050522387599:3071] 2026-02-19T08:19:30.019717Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488050522387599:3071] txid# 281474976710669 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58812" 2026-02-19T08:19:30.019790Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488050522387599:3071] txid# 281474976710669 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:30.019811Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488050522387599:3071] txid# 281474976710669 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-02-19T08:19:30.019960Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488050522387599:3071] txid# 281474976710669 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:30.020008Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488050522387599:3071] txid# 281474976710669 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:30.020057Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488050522387599:3071] txid# 281474976710669 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:30.020334Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488050522387599:3071] txid# 281474976710669 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:30.020361Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488050522387599:3071] txid# 281474976710669, Access denied for ordinaryuser@builtin, attempt to manage 
user 2026-02-19T08:19:30.020449Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488050522387599:3071] txid# 281474976710669, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-02-19T08:19:30.020477Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488050522387599:3071] txid# 281474976710669 SEND to# [59:7608488050522387598:2351] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:30.021493Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=NmYwNjUzOTktZjJjNjAzZDItNGI0NzJkN2ItNTkzNjI5OWU=, ActorId: [59:7608488007572714452:2351], ActorState: ExecuteState, LegacyTraceId: 01khtfq4jd4bff86hf7qnhwq4n, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:30.027652Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608487981802909372:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:30.027684Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608487981802909372:2141] TxId# 281474976710670 ProcessProposeKqpTransaction >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap >> VectorIndexBuildTest::Shard_Build_Error [GOOD] >> VectorIndexBuildTest::PrefixedLimit >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TestSqsTopicHttpProxy::TestDeleteMessageBatch [GOOD] >> CdcStreamChangeCollector::SchemaChanges [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] Test command err: Starting YDB, grpc: 18068, msgbus: 28635 2026-02-19T08:13:58.340036Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486627334830375:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:58.340269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:58.578340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:13:58.609370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:58.609496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:58.647336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:58.688557Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:58.741570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:13:58.847420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:58.847444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:58.847454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:58.847548Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:59.060089Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486627334830583:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:13:59.060149Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486631629798629:2707] HANDLE EvNavigateScheme dc-1 2026-02-19T08:13:59.060509Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486631629798629:2707] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:59.091068Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486631629798629:2707] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:13:59.097263Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486631629798629:2707] Handle TEvDescribeSchemeResult Forward to# [1:7608486631629798628:2706] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:13:59.104480Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486627334830583:2143] Handle TEvProposeTransaction 2026-02-19T08:13:59.104512Z node 1 :TX_PROXY DEBUG: 
proxy_impl.cpp:237: actor# [1:7608486627334830583:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:13:59.104615Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486627334830583:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608486631629798634:2712] 2026-02-19T08:13:59.220296Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486631629798634:2712] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:13:59.220403Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486631629798634:2712] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:13:59.220438Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486631629798634:2712] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:13:59.220515Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486631629798634:2712] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:13:59.221079Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486631629798634:2712] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:59.221218Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486631629798634:2712] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:13:59.221319Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486631629798634:2712] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:13:59.221497Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486631629798634:2712] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:13:59.222383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:59.224635Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486631629798634:2712] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:13:59.224680Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486631629798634:2712] txid# 281474976710657 SEND to# [1:7608486631629798633:2711] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:13:59.243044Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486627334830583:2143] Handle TEvProposeTransaction 2026-02-19T08:13:59.243071Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486627334830583:2143] TxId# 281474976710658 
ProcessProposeTransaction 2026-02-19T08:13:59.243109Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486627334830583:2143] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7608486631629798673:2748] 2026-02-19T08:13:59.244970Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486631629798673:2748] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:13:59.245027Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486631629798673:2748] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:13:59.245040Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486631629798673:2748] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:13:59.245087Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486631629798673:2748] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:13:59.245422Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486631629798673:2748] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:59.245566Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486631629798673:2748] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:13:59.245652Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486631629798673:2748] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-02-19T08:13:59.245842Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486631629798673:2748] txid# 281474976710658 HANDLE EvClientConnected 2026-02-19T08:13:59.246359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:13:59.250282Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486631629798673:2748] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-02-19T08:13:59.250336Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486631629798673:2748] txid# 281474976710658 SEND to# [1:7608486631629798672:2747] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-02-19T08:13:59.285288Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486627334830583:2143] Handle TEvProposeTransaction 2026-02-19T08:13:59.285324Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486627334830583:2143] TxId# 281474976710659 ProcessProposeTransaction 2026-02-19T08:13:59.285387Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [ ... 
ImV4cCI6MTc3MTUzMjM2NCwiaWF0IjoxNzcxNDg5MTY0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47940" 2026-02-19T08:19:25.713874Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488028979956734:3037] txid# 281474976710666 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:25.713896Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488028979956734:3037] txid# 281474976710666 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:19:25.714064Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488028979956734:3037] txid# 281474976710666 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:25.714138Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488028979956734:3037] txid# 281474976710666 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:25.714188Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488028979956734:3037] txid# 281474976710666 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:25.714509Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488028979956734:3037] txid# 281474976710666 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:25.714542Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488028979956734:3037] txid# 281474976710666, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:19:25.714646Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488028979956734:3037] txid# 281474976710666, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:19:25.714682Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488028979956734:3037] txid# 281474976710666 SEND to# [59:7608488028979956733:2362] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:25.721912Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZDU1NTEyY2MtZjhhNGVhMTgtODRlYWI5ZDctMWI1YzFkZmQ=, ActorId: [59:7608488024684989397:2362], ActorState: ExecuteState, LegacyTraceId: 01khtfq0b85ztn8envbtb92cpw, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:19:25.725049Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608487998915184194:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:25.725081Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608487998915184194:2141] TxId# 281474976710667 ProcessProposeKqpTransaction 2026-02-19T08:19:28.120503Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608487998915184194:2141] Handle TEvProposeTransaction 2026-02-19T08:19:28.120537Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608487998915184194:2141] TxId# 281474976710668 ProcessProposeTransaction 2026-02-19T08:19:28.120581Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608487998915184194:2141] Cookie# 0 userReqId# "" txid# 281474976710668 SEND to# [59:7608488041864858670:3058] 2026-02-19T08:19:28.122743Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488041864858670:3058] txid# 281474976710668 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM2NCwiaWF0IjoxNzcxNDg5MTY0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.DelGzeh2xzImzi5kzdHjQbbTSPFppWS0v3qIZpzi6Yuhqz8cj7hN0AzZ8gmmUEl2NV4UG-yLqSfiq62QdOojQa-_L7i0SQqwRoHknMx23aepWqNfoxF6A4Jd0-yJhWG4hH_K72Pd-PYysVsRhJ39EFn75N4Jy3hnisavGaZc9jPel_KsVcVD_rjpY_kcVqvN4ovSCdvV-AKM0HFhQQ9lAaFLQ5XvUZ3OR00URdZ_Bcl8CpAUgbFXtkwer4Ttr5WUOGDeZjEvJPWLQveLGHLD1eo58Sz5bgYADtgCTKQjkQ8Q-BqUr5FkPGwE-QY-M2nQG5l7As-HmGDmX6lOwRKk1Q\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM2NCwiaWF0IjoxNzcxNDg5MTY0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47940" 2026-02-19T08:19:28.122829Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488041864858670:3058] txid# 281474976710668 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:28.122851Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488041864858670:3058] txid# 281474976710668 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:19:28.122995Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488041864858670:3058] txid# 281474976710668 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:28.123062Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488041864858670:3058] txid# 281474976710668 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:28.123109Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488041864858670:3058] txid# 281474976710668 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:28.123423Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488041864858670:3058] txid# 281474976710668 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:28.123449Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488041864858670:3058] txid# 281474976710668, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:19:28.123543Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488041864858670:3058] txid# 281474976710668, 
issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:19:28.123572Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488041864858670:3058] txid# 281474976710668 SEND to# [59:7608488041864858669:2362] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:28.124437Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZDU1NTEyY2MtZjhhNGVhMTgtODRlYWI5ZDctMWI1YzFkZmQ=, ActorId: [59:7608488024684989397:2362], ActorState: ExecuteState, LegacyTraceId: 01khtfq2pp15tcer520qaw4r5j, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:28.124929Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608487998915184194:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:28.124947Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608487998915184194:2141] TxId# 281474976710669 ProcessProposeKqpTransaction 2026-02-19T08:19:33.389804Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:19:33.389837Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:33.902291Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608487998915184194:2141] Handle TEvProposeTransaction 2026-02-19T08:19:33.902326Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608487998915184194:2141] TxId# 281474976710670 ProcessProposeTransaction 2026-02-19T08:19:33.902374Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608487998915184194:2141] Cookie# 0 userReqId# "" txid# 281474976710670 SEND to# [59:7608488063339695245:3101] 2026-02-19T08:19:33.904694Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488063339695245:3101] txid# 281474976710670 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM2NCwiaWF0IjoxNzcxNDg5MTY0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.DelGzeh2xzImzi5kzdHjQbbTSPFppWS0v3qIZpzi6Yuhqz8cj7hN0AzZ8gmmUEl2NV4UG-yLqSfiq62QdOojQa-_L7i0SQqwRoHknMx23aepWqNfoxF6A4Jd0-yJhWG4hH_K72Pd-PYysVsRhJ39EFn75N4Jy3hnisavGaZc9jPel_KsVcVD_rjpY_kcVqvN4ovSCdvV-AKM0HFhQQ9lAaFLQ5XvUZ3OR00URdZ_Bcl8CpAUgbFXtkwer4Ttr5WUOGDeZjEvJPWLQveLGHLD1eo58Sz5bgYADtgCTKQjkQ8Q-BqUr5FkPGwE-QY-M2nQG5l7As-HmGDmX6lOwRKk1Q\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM2NCwiaWF0IjoxNzcxNDg5MTY0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47940" 2026-02-19T08:19:33.904765Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488063339695245:3101] txid# 281474976710670 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:33.904784Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488063339695245:3101] txid# 281474976710670 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:19:33.904934Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488063339695245:3101] txid# 
281474976710670 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:33.904995Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488063339695245:3101] txid# 281474976710670 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:33.905041Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488063339695245:3101] txid# 281474976710670 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:33.905303Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488063339695245:3101] txid# 281474976710670 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:33.905330Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488063339695245:3101] txid# 281474976710670, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:19:33.905422Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488063339695245:3101] txid# 281474976710670, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:19:33.905452Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488063339695245:3101] txid# 281474976710670 SEND to# [59:7608488063339695244:2362] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:33.905899Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZDU1NTEyY2MtZjhhNGVhMTgtODRlYWI5ZDctMWI1YzFkZmQ=, ActorId: [59:7608488024684989397:2362], ActorState: ExecuteState, LegacyTraceId: 01khtfq8bc5ga7b6aefnms0sgf, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:33.907630Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608487998915184194:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:33.907661Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608487998915184194:2141] TxId# 281474976710671 ProcessProposeKqpTransaction >> TGRpcCmsTest::RemoveWithAnotherTokenTest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream >> TBackupCollectionTests::DropCollectionWithIncrementalBackup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveBackup >> KqpPg::LongDomainName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::SchemaChanges [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:19:06.898432Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:07.022056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:07.044415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:07.044499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:07.044984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:07.428515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:07.428638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:07.523465Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489143741172 != 1771489143741176 2026-02-19T08:19:07.533073Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:07.579962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:07.687334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:08.014665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:08.028375Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:08.150440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:08.193354Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:19:08.193794Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:08.242793Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:08.242964Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:08.245072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:19:08.245200Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:19:08.245289Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:19:08.245742Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:08.245922Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:08.246049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:19:08.257820Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:08.304206Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 
2026-02-19T08:19:08.304462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:19:08.304616Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:19:08.304662Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:08.304707Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:08.304749Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:08.305295Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:08.305399Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:08.305473Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:08.305536Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:08.305598Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:08.305652Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:08.306141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:19:08.306364Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:08.306670Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:08.306806Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:19:08.309107Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:08.323817Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:08.323989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:19:08.464455Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:923:2788], serverId# [1:925:2790], sessionId# [0:0:0] 2026-02-19T08:19:08.482927Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:19:08.483050Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:19:08.483470Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:08.483529Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:19:08.483592Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:19:08.483939Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:19:08.484130Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:08.485080Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:08.485177Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:19:08.490072Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:19:08.490648Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:08.493314Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:19:08.493383Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:08.494002Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:19:08.494120Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:08.496125Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:08.496179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:08.496242Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:19:08.496337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:08.496408Z node 1 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:19:08.496513Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:08.501877Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:08.504185Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:19:08.504296Z 
node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:08.505179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSu ... 1 2026-02-19T08:19:35.291564Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:35.292238Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1855: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 38], version# 2, step# 2500, txId# 281474976715658, at tablet# 72075186224037888 2026-02-19T08:19:35.292537Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:35.337438Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-02-19T08:19:35.337562Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:35.337611Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:35.337668Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:35.337767Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:35.337834Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-02-19T08:19:35.337945Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:35.340434Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-02-19T08:19:35.340533Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:35.350158Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1068:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:35.350268Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:35.350338Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:35.351062Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1083:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:35.351145Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:35.355931Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:35.362922Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:35.531259Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:19:35.535053Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1082:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:19:35.572285Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:1140:2944] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:35.656935Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1170:2961], serverId# [4:1171:2962], sessionId# [0:0:0] 2026-02-19T08:19:35.657341Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:3] at 72075186224037888 2026-02-19T08:19:35.657540Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 1 Group: 1771489175657448 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:35.657788Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-02-19T08:19:35.668868Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 32 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-02-19T08:19:35.668957Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:35.698633Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:19:35.701524Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:35.705540Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715662 ssId 72057594046644480 seqNo 2:3 2026-02-19T08:19:35.705651Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:236: Check scheme tx, proposed scheme version# 3 current version# 2 expected version# 3 at tablet# 72075186224037888 txId# 281474976715662 2026-02-19T08:19:35.705723Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715662 at tablet 72075186224037888 2026-02-19T08:19:35.720128Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:19:35.837911Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715662 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:19:35.838004Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:35.838229Z node 4 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:35.838289Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:19:35.838342Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [3500:281474976715662] in PlanQueue unit at 72075186224037888 2026-02-19T08:19:35.838668Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 3500:281474976715662 keys extracted: 0 2026-02-19T08:19:35.838831Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:19:35.839080Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:35.839181Z node 4 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037888 version 3 2026-02-19T08:19:35.840207Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1855: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 38], version# 3, step# 3500, txId# 281474976715662, at tablet# 72075186224037888 2026-02-19T08:19:35.840384Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:886: PersistChangeRecord: record: { Order: 2 Group: 0 Step: 3500 TxId: 281474976715662 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcSchemaChange Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-02-19T08:19:35.840850Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:35.847739Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2026-02-19T08:19:35.847866Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:35.848929Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:35.849027Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1184: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2026-02-19T08:19:35.849140Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [3500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [4:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:19:35.849201Z node 4 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2026-02-19T08:19:35.849331Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:35.853284Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2026-02-19T08:19:35.853387Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:19:35.860815Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: 
Server connected at leader tablet# 72075186224037888, clientId# [4:1213:2999], serverId# [4:1214:3000], sessionId# [0:0:0] 2026-02-19T08:19:35.882233Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [4:1216:3002], serverId# [4:1217:3003], sessionId# [0:0:0] >> VectorIndexBuildTest::PrefixedLimit [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> TGRpcCmsTest::AlterRemoveTest >> TGRpcCmsTest::AuthTokenTest >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |91.7%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams >> TestSqsTopicHttpProxy::TestCreateQueueExistingTopicNoConsumerWithExtendedRetention [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveBackup [GOOD] >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::PrefixedLimit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:28.657051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:28.657143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:28.657217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:28.657277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:28.657324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:28.657366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:28.657395Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:28.657424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:28.657516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:28.657628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:28.658553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:28.658915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:28.758308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:28.758373Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:28.772532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:28.775758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:28.775967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:28.796568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:28.797301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:28.798131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:28.798442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:28.801947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:28.802131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:28.803304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:28.803369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:28.803477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:28.803521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:28.803563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:28.803749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:18:28.976694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.977757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.977916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.977987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:28.978901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
:38.737575Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 104 Rejection_Unlocking TBuildInfo{ IndexBuildId: 104, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: idx_global, IndexColumn: prefix, IndexColumn: embedding, State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At CreateBuild state got unsuccess propose result, status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/Table/idx_global/indexImplPostingTable0build', error: paths count limit exceeded, limit: 6, paths: 6, delta: 1, SubscribersCount: 1, CreateSender: [6:1009:2927], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:19:38.738074Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976730761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976730761 msg type: 269090816 2026-02-19T08:19:38.738214Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976730761, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:19:38.738406Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976730761, at schemeshard: 72057594046678944 2026-02-19T08:19:38.738445Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730761, ready parts: 0/1, is published: true 2026-02-19T08:19:38.738485Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976730761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976730761 at step: 5000042 FAKE_COORDINATOR: advance: minStep5000042 State->FrontStep: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976730761 at step: 5000042 2026-02-19T08:19:38.738710Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:38.738811Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976730761 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 25769805926 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:19:38.738880Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976730761:0 HandleReply TEvOperationPlan: step# 5000042 
2026-02-19T08:19:38.738952Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976730761:0 Propose -> Done FAKE_COORDINATOR: Erasing txId 281474976730761 2026-02-19T08:19:38.741192Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730761:0, at schemeshard: 72057594046678944 2026-02-19T08:19:38.741247Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 281474976730761:0 ProgressState 2026-02-19T08:19:38.741346Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730761:0 progress is 1/1 2026-02-19T08:19:38.741385Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-02-19T08:19:38.741429Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730761:0 progress is 1/1 2026-02-19T08:19:38.741463Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-02-19T08:19:38.741504Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976730761, ready parts: 1/1, is published: true 2026-02-19T08:19:38.741582Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:948:2879] message: TxId: 281474976730761 2026-02-19T08:19:38.741632Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-02-19T08:19:38.741674Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730761:0 2026-02-19T08:19:38.741719Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976730761:0 2026-02-19T08:19:38.741809Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:19:38.744235Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976730761 2026-02-19T08:19:38.744308Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976730761 2026-02-19T08:19:38.744394Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 104, txId# 281474976730761 2026-02-19T08:19:38.744543Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 104, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: idx_global, IndexColumn: prefix, IndexColumn: embedding, State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At CreateBuild state got unsuccess propose result, status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/Table/idx_global/indexImplPostingTable0build', error: paths count limit exceeded, limit: 6, paths: 6, delta: 1, 
SubscribersCount: 1, CreateSender: [6:1009:2927], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730761 2026-02-19T08:19:38.749903Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 104 Rejection_Unlocking 2026-02-19T08:19:38.750098Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 104 Rejection_Unlocking TBuildInfo{ IndexBuildId: 104, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: idx_global, IndexColumn: prefix, IndexColumn: embedding, State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At CreateBuild state got unsuccess propose result, status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/Table/idx_global/indexImplPostingTable0build', error: paths count limit exceeded, limit: 6, paths: 6, delta: 1, SubscribersCount: 1, CreateSender: [6:1009:2927], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:19:38.750181Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-02-19T08:19:38.753772Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 104 Rejected 2026-02-19T08:19:38.753976Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 104 Rejected TBuildInfo{ IndexBuildId: 104, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: idx_global, IndexColumn: prefix, IndexColumn: embedding, State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At CreateBuild state got unsuccess propose result, status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/Table/idx_global/indexImplPostingTable0build', error: 
paths count limit exceeded, limit: 6, paths: 6, delta: 1, SubscribersCount: 1, CreateSender: [6:1009:2927], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:19:38.754059Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 104, subscribers count# 1 2026-02-19T08:19:38.754242Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:19:38.754318Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [6:1218:3092] TestWaitNotification: OK eventTxId 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: 2026-02-19T08:17:08.932409Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487443540787269:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:08.932524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:09.132448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:09.155182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:09.155303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:09.203730Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:09.206630Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487443540787239:2081] 1771489028930147 != 1771489028930150 2026-02-19T08:17:09.219484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:09.314501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:09.314539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:09.314552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:09.314685Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:09.353300Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:09.718675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:09.955416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:12.159910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487460720657322:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:12.160013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:12.160015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487460720657330:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:12.160414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487460720657337:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:12.160503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:12.163758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:12.176966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487460720657336:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:17:12.287422Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487460720657389:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:13.267927Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487462207998068:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:13.267997Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:13.291218Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:13.388299Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:13.390379Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608487462207998023:2081] 1771489033266240 != 1771489033266243 2026-02-19T08:17:13.418174Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:13.418265Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:13.455045Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:13.461801Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:13.561740Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:13.561764Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:13.561770Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:13.561851Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:14.030885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:14.273605Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:16.639847Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487475092900811:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.639920Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487475092900803:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.640077Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.642096Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487475092900818:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.642196Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:16.644568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:16.662092Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487475092900817:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:17:16.742741Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487475092900870:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:17.921012Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608487483373653436:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:17.921521Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:17.957403Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:18.084864Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:18.086209Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:18.086283Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:18.08 ... ol info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.779987Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.789301Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:24.808111Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7608488027420195957:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:19:24.866023Z node 10 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [10:7608488027420196010:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:24.954997Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 
2026-02-19T08:19:29.530528Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7608488049630642999:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:29.531132Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:29.585248Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-02-19T08:19:29.740575Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:29.745949Z node 11 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [11:7608488049630642960:2081] 1771489169524717 != 1771489169524720 2026-02-19T08:19:29.770984Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:29.771109Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:29.776582Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-02-19T08:19:29.823235Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:30.138578Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:30.138606Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:30.138617Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:30.138724Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:30.283512Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-02-19T08:19:30.537895Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:31.198901Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:31.210269Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:34.531743Z node 11 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7608488049630642999:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:34.531867Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:36.825268Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608488079695414952:2337], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.825275Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608488079695414932:2334], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.825406Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.826145Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7608488079695414956:2339], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.826257Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.830693Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:36.847805Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7608488079695414955:2338], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:19:36.938127Z node 11 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [11:7608488079695415008:2549] txid# 281474976710659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:36.976658Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> KqpExplain::UpdateOn-UseSink [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink-UseStreamIndex |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> TestSqsTopicHttpProxy::TestCreateQueueWithAttributes [GOOD] >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService-UseSink >> TGRpcCmsTest::DisabledTxTest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup [GOOD] >> TBackupCollectionTests::DropCollectionRollbackOnFailure ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2026-02-19T08:19:36.174754Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488077285957875:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:36.175275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:36.613065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:36.659744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:36.659845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:36.687892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-02-19T08:19:36.736408Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:36.946383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:37.002399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:37.002420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:37.002426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:37.002501Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:37.198706Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:37.240790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:37.393774Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7608488081580926180:2306], Recipient [1:7608488077285958305:2206]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:49788" } 2026-02-19T08:19:37.393855Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-02-19T08:19:37.393885Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:37.393899Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:37.394073Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:49788" 2026-02-19T08:19:37.394229Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1771489177389960) 2026-02-19T08:19:37.444960Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1771489177389960 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-02-19T08:19:37.445207Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 
2026-02-19T08:19:37.454483Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-02-19T08:19:37.455407Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489177389960&action=1" } } } 2026-02-19T08:19:37.455545Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:37.455613Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:37.455731Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:37.456184Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-02-19T08:19:37.456303Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-02-19T08:19:37.458247Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285139, Sender [1:7608488081580926180:2306], Recipient [1:7608488077285958305:2206]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489177389960&action=1" } UserToken: "" PeerName: "ipv6:[::1]:49788" } 2026-02-19T08:19:37.458276Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:968: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2026-02-19T08:19:37.458454Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3434: Add subscription to /Root/users/user-1 for [1:7608488081580926180:2306] 2026-02-19T08:19:37.458557Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3442: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489177389960&action=1" } } 2026-02-19T08:19:37.467929Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-02-19T08:19:37.467988Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:37.468052Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7608488081580926186:2206], Recipient [1:7608488077285958305:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:37.468080Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:37.468094Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:37.468106Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:37.468149Z node 1 :CMS_TENANTS DEBUG: 
console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-02-19T08:19:37.468178Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-02-19T08:19:37.468243Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-02-19T08:19:37.487822Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:37.487852Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:37.487866Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:37.487873Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:37.487940Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-02-19T08:19:37.487979Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1771489177389960 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:37.501609Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:37.521653Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:37.521777Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-02-19T08:19:37.521790Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-02-19T08:19:37.537311Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:49788" 2026-02-19T08:19:37.539376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:19:37.542045Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-02-19T08:19:37.542126Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2026-02-19T08:19:37.547884Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2026-02-19T08:19:37.558008Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: 
TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2026-02-19T08:19:37.561077Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:872: TSubdomainManip(/Root/users/user-1) got ... tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:7 2026-02-19T08:19:38.750073Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found - using supplied 72075186224037895 2026-02-19T08:19:38.750081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:4 2026-02-19T08:19:38.750114Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found - using supplied 72075186224037889 2026-02-19T08:19:38.750133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2026-02-19T08:19:38.750158Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found - using supplied 72075186224037896 2026-02-19T08:19:38.755091Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2026-02-19T08:19:38.755116Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-02-19T08:19:38.755171Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:38.755316Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7608488085875894098:2206], Recipient [1:7608488077285958305:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:38.755342Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:38.755369Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:38.755379Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:38.755413Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-02-19T08:19:38.755432Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1771489178628164 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:38.755477Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1771489178628164 issue= 2026-02-19T08:19:38.755668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-02-19T08:19:38.755730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 
72075186224037888 shardIdx 72057594046644480:6 2026-02-19T08:19:38.755778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-02-19T08:19:38.755830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-02-19T08:19:38.755886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-02-19T08:19:38.760834Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-02-19T08:19:38.763263Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-02-19T08:19:38.763354Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-02-19T08:19:38.763375Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:38.763643Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488077285958190:2205], Recipient [1:7608488077285958305:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:38.763697Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:38.763715Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:38.763754Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:38.763782Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-02-19T08:19:38.763803Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1771489178628164 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:38.766752Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:38.766805Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:38.766834Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:38.766943Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:38.767748Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-02-19T08:19:38.767833Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: 
TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-02-19T08:19:38.779049Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-02-19T08:19:38.779183Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7608488085875894214:2206], Recipient [1:7608488077285958305:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-02-19T08:19:38.779248Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-02-19T08:19:38.779270Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:38.779280Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:38.779354Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-02-19T08:19:38.779386Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-02-19T08:19:38.805145Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:38.805187Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:38.805195Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:38.805203Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:38.805297Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1771489178628164 2026-02-19T08:19:38.805318Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1771489178628164 issue= 2026-02-19T08:19:38.805355Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1771489178628164 issue= 2026-02-19T08:19:38.805370Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-02-19T08:19:38.805533Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1771489178628164 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:38.807582Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-02-19T08:19:38.807800Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2422: Send /Root/users/user-1 notification to [1:7608488085875894090:2374]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489178628164&action=2" ready: true status: SUCCESS } } 2026-02-19T08:19:38.807881Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:38.822456Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7608488085875894235:2376], Recipient [1:7608488077285958305:2206]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: 
"ipv6:[::1]:49788" } 2026-02-19T08:19:38.822483Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-02-19T08:19:38.822631Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2026-02-19T08:19:38.831088Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7608488085875894238:2377], Recipient [1:7608488077285958305:2206]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:49788" } 2026-02-19T08:19:38.831118Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2026-02-19T08:19:38.831328Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2026-02-19T08:19:38.843432Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-02-19T08:19:38.843615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:19:41.585412Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifo [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestCreateQueueExistingTopicNoConsumerWithExtendedRetention [GOOD] Test command err: 2026-02-19T08:18:11.193446Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487712761583481:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:11.193524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:18:11.565800Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:11.614346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:11.614436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:11.804710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:11.928789Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:11.934071Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487712761583262:2081] 1771489091135779 != 
1771489091135782 2026-02-19T08:18:11.969837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:12.177855Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:12.194457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:12.194481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:18:12.194488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:12.194578Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:12.518059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:12.532924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:18:12.874992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:18:12.884876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:18:12.891177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:12.944900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-02-19T08:18:13.064847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.355910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.433015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, 
txId: 281474976715663, at schemeshard: 72057594046644480 2026-02-19T08:18:13.438757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.514374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-02-19T08:18:13.520140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.606309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.655425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.840567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.934043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:13.998145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.084563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:16.197150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487712761583481:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:16.210055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:16.488511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487734236421413:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.488660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.493096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487734236421425:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.493215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.493555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487734236421427:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:16.503953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:16.545870Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487734236421429:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-02-19T08:18:16.642671Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487734236421481:3108] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:17.072028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.153812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core ... 07][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:41.240420Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.240440Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:41.240451Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:41.249387Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:19:41.252881Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:3668: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-02-19T08:19:41.255891Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:1060: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-02-19T08:19:41.258917Z node 7 :PERSQUEUE DEBUG: partition.cpp:1295: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1771489181301, TxId 281474976710691 2026-02-19T08:19:41.258980Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:41.259002Z node 7 :PERSQUEUE DEBUG: partition.cpp:2445: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-02-19T08:19:41.259051Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:19:41.259063Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.259110Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:19:41.259125Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:41.259445Z node 7 :PERSQUEUE DEBUG: 
partition.cpp:1439: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1771489181301, TxId 281474976710691 2026-02-19T08:19:41.259480Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:41.259497Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:19:41.259509Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.259539Z node 7 :PERSQUEUE DEBUG: partition.cpp:2509: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-02-19T08:19:41.259731Z node 7 :PERSQUEUE DEBUG: partition.cpp:3870: [72075186224037907][Partition][0][StateIdle] Topic 'ExistingTopicNoConsumerRetention' partition 0 user ydb-sqs-consumer reinit with generation 2 done 2026-02-19T08:19:41.259746Z node 7 :PERSQUEUE DEBUG: partition.cpp:3984: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976710691 2026-02-19T08:19:41.259764Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:19:41.259777Z node 7 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:19:41.259792Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:41.260094Z node 7 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:19:41.264964Z node 7 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:19:41.265218Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-02-19T08:19:41.265232Z node 7 :PERSQUEUE INFO: partition_mlp.cpp:176: [72075186224037907][Partition][0][StateIdle] Creating MLP consumer 'ydb-sqs-consumer' 2026-02-19T08:19:41.266401Z node 7 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:19:41.266435Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:41.266452Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:41.266464Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.266486Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:41.266497Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:41.266525Z node 7 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:19:41.267046Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 
SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ExistingTopicNoConsumerRetention" Version: 2 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/ExistingTopicNoConsumerRetention" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037907 } Consumers { Name: "other-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_STREAMING } Consumers { Name: "ydb-sqs-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 2 AvailabilityPeriodMs: 172800000 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 1000 DefaultProcessingTimeoutSeconds: 30 } MonitoringProjectId: "" 2026-02-19T08:19:41.267078Z node 7 :PERSQUEUE NOTICE: pq_impl.cpp:948: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-02-19T08:19:41.267604Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:98: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerStatus Consumer: "ydb-sqs-consumer" PartitionId: 0 Messages: 0 UseForReading: false 2026-02-19T08:19:41.267662Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:5448: [PQ: 72075186224037907] Handle TEvPQ::TEvMLPConsumerStatus Consumer: "ydb-sqs-consumer" PartitionId: 0 Messages: 0 UseForReading: false Generation: 1 Cookie: 1 2026-02-19T08:19:41.281670Z node 7 :SQS INFO: http_req.cpp:1237: Request [d0b108d3-b7a76158-ac79af45-8573cc2e] Request done. 
Api [SqsTopic] Action [CreateQueue] User [AKIDEXAMPLE] Database [/Root] IP [5896:fde9:b67b:0:4096:fde9:b67b:0] Duration [104ms] Code [200] 2026-02-19T08:19:41.281723Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [CreateQueue] requestId [d0b108d3-b7a76158-ac79af45-8573cc2e] reply ok 2026-02-19T08:19:41.281984Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:53988) <- (200 , 124 bytes) 2026-02-19T08:19:41.282075Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:53988) connection closed Http output full {"QueueUrl":"http://ghrun-fkrokuvqke.auto.internal:8771/v1/5//Root/32/ExistingTopicNoConsumerRetention/16/ydb-sqs-consumer"} 2026-02-19T08:19:41.287076Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:53992) incoming connection opened 2026-02-19T08:19:41.287149Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:53992) -> (POST /Root, 150 bytes) 2026-02-19T08:19:41.287324Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [1891:a0e9:b67b:0:91:a0e9:b67b:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: 2ac75ebb-7c552444-d9429621-903efa43 2026-02-19T08:19:41.287809Z node 7 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [2ac75ebb-7c552444-d9429621-903efa43] got new request from [1891:a0e9:b67b:0:91:a0e9:b67b:0] database '/Root' stream '' 2026-02-19T08:19:41.290014Z node 7 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [2ac75ebb-7c552444-d9429621-903efa43] [auth] Authorized successfully 2026-02-19T08:19:41.290078Z node 7 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [2ac75ebb-7c552444-d9429621-903efa43] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:19:41.291609Z node 7 :SQS INFO: http_req.cpp:1237: Request [2ac75ebb-7c552444-d9429621-903efa43] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExistingTopicNoConsumerRetention] IP [1891:a0e9:b67b:0:91:a0e9:b67b:0] Duration [6ms] Code [200] 2026-02-19T08:19:41.291640Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [GetQueueAttributes] requestId [2ac75ebb-7c552444-d9429621-903efa43] reply ok 2026-02-19T08:19:41.291919Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:53992) <- (200 , 50 bytes) 2026-02-19T08:19:41.291996Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:53992) connection closed Http output full {"Attributes":{"MessageRetentionPeriod":"172800"}} 2026-02-19T08:19:41.341321Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:41.341359Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:41.341374Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.341393Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:41.341417Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:41.445860Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:41.445899Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:41.445913Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.445944Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:41.445957Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestCreateQueueWithAttributes [GOOD] Test command err: 2026-02-19T08:18:13.355966Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487722692369533:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:13.356646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:18:13.769769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:13.932315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-02-19T08:18:13.932454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:13.938812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:13.989877Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487722692369495:2081] 1771489093352701 != 1771489093352704 2026-02-19T08:18:13.994833Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:14.056069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:14.156751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:14.156784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:18:14.156805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:14.156897Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:14.380695Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:14.421228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:14.434330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:18:14.857260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:18:14.875793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:14.964269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.157807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.224058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2026-02-19T08:18:15.229042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.301132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-02-19T08:18:15.306283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.365428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.417733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.485439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.541772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.630749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.678083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.840913Z node 1 :KQP_WORKLOAD_SERVICE 
WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487739872240347:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.841070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.841632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487739872240359:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.841698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487739872240360:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.841848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.846692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:17.868464Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487739872240363:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-02-19T08:18:17.960026Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487739872240414:3109] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:18.356015Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487722692369533:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:18.356104Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:18.484316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:18.519487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:18.564513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx ... 
37907] no config, start with empty partitions and default config 2026-02-19T08:19:41.986973Z node 7 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72075186224037907] doesn't have tx writes info 2026-02-19T08:19:41.988163Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037907] server connected, pipe [7:7608488101116183506:3646], now have 1 active actors on pipe 2026-02-19T08:19:41.988180Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037907] server connected, pipe [7:7608488101116183553:2478], now have 1 active actors on pipe 2026-02-19T08:19:41.988493Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:3668: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-02-19T08:19:41.989050Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:1060: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-02-19T08:19:41.990960Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037907] server connected, pipe [7:7608488101116183563:3677], now have 1 active actors on pipe 2026-02-19T08:19:41.991771Z node 7 :PERSQUEUE DEBUG: partition_init.cpp:86: [ExampleQueueName:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:19:41.991812Z node 7 :PERSQUEUE DEBUG: partition.cpp:1330: [72075186224037907][Partition][0][StateInit] HandleOnInit TEvPQ::TEvProposePartitionConfig 2026-02-19T08:19:41.992016Z node 7 :PERSQUEUE DEBUG: partition_init.cpp:86: [ExampleQueueName:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:19:41.992195Z node 7 :PERSQUEUE INFO: partition_init.cpp:1175: [72075186224037907][Partition][0][StateInit] bootstrapping 0 [7:7608488101116183567:2479] 2026-02-19T08:19:41.992989Z node 7 :PERSQUEUE DEBUG: partition_init.cpp:86: [ExampleQueueName:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:19:41.993008Z node 7 :PERSQUEUE DEBUG: partition_init.cpp:66: [ExampleQueueName:0:Initializer] Initializing completed. 2026-02-19T08:19:41.993026Z node 7 :PERSQUEUE INFO: partition.cpp:713: [72075186224037907][Partition][0][StateInit] init complete for topic 'ExampleQueueName' partition 0 generation 1 [7:7608488101116183567:2479] 2026-02-19T08:19:41.993050Z node 7 :PERSQUEUE DEBUG: partition.cpp:733: [72075186224037907][Partition][0][StateInit] SYNC INIT topic ExampleQueueName partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:19:41.993082Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-02-19T08:19:41.993093Z node 7 :PERSQUEUE INFO: partition_mlp.cpp:176: [72075186224037907][Partition][0][StateIdle] Creating MLP consumer 'ydb-sqs-consumer' 2026-02-19T08:19:41.993645Z node 7 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'ExampleQueueName' partition 0 user ydb-sqs-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-02-19T08:19:41.993669Z node 7 :PERSQUEUE DEBUG: partition.cpp:4506: [72075186224037907][Partition][0][StateIdle] Process pending events. 
Count 1 2026-02-19T08:19:41.993693Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:41.993729Z node 7 :PERSQUEUE DEBUG: partition.cpp:2445: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-02-19T08:19:41.993764Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:19:41.993776Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.993790Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:19:41.993804Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:41.993828Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:41.993842Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:19:41.993852Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.993863Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:19:41.993870Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:41.993892Z node 7 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'ExampleQueueName' partition 0 user ydb-sqs-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-02-19T08:19:41.993917Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72075186224037907][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-02-19T08:19:41.998680Z node 7 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:19:41.998834Z node 7 :PERSQUEUE DEBUG: partition.cpp:1439: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1771489182036, TxId 281474976715690 2026-02-19T08:19:41.998863Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:41.998876Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:19:41.998887Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:41.998913Z node 7 :PERSQUEUE DEBUG: partition.cpp:2509: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-02-19T08:19:41.999072Z node 7 :PERSQUEUE DEBUG: partition.cpp:3984: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976715690 2026-02-19T08:19:41.999097Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:19:41.999109Z node 7 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:19:41.999123Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:41.999392Z node 7 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:19:42.000280Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:98: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerStatus Consumer: "ydb-sqs-consumer" PartitionId: 0 Messages: 0 UseForReading: false 2026-02-19T08:19:42.000656Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:5448: [PQ: 72075186224037907] Handle TEvPQ::TEvMLPConsumerStatus Consumer: "ydb-sqs-consumer" PartitionId: 0 Messages: 0 UseForReading: false Generation: 1 Cookie: 1 2026-02-19T08:19:42.001200Z node 7 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:19:42.001473Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-02-19T08:19:42.001494Z node 7 :PERSQUEUE INFO: partition_mlp.cpp:156: [72075186224037907][Partition][0][StateIdle] Updating MLP consumer 'ydb-sqs-consumer' config 2026-02-19T08:19:42.001689Z node 7 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:19:42.001727Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:42.001742Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:42.001754Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:42.001770Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-02-19T08:19:42.001782Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:19:42.001802Z node 7 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:19:42.002218Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ExampleQueueName" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { Ids: 0 Codecs: "raw" } TopicPath: "/Root/ExampleQueueName" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "ydb-sqs-consumer" Generation: 0 AvailabilityPeriodMs: 3600000 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DefaultProcessingTimeoutSeconds: 30 } MonitoringProjectId: "" 2026-02-19T08:19:42.002292Z node 7 :PERSQUEUE NOTICE: pq_impl.cpp:948: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-02-19T08:19:42.010695Z node 7 :SQS INFO: http_req.cpp:1237: Request [6149110b-39bd459e-d7e350b6-6890bb57] Request done. Api [SqsTopic] Action [CreateQueue] User [AKIDEXAMPLE] Database [/Root] IP [b88c:8698:4f7c:0:a08c:8698:4f7c:0] Duration [86ms] Code [200] 2026-02-19T08:19:42.010731Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [CreateQueue] requestId [6149110b-39bd459e-d7e350b6-6890bb57] reply ok 2026-02-19T08:19:42.011005Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:45996) <- (200 , 108 bytes) 2026-02-19T08:19:42.011109Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:45996) connection closed Http output full {"QueueUrl":"http://ghrun-fkrokuvqke.auto.internal:8771/v1/5//Root/16/ExampleQueueName/16/ydb-sqs-consumer"} 2026-02-19T08:19:42.088399Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:42.088439Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:42.088463Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:42.088482Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:42.088510Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist >> YdbTableSplit::SplitByLoadWithReads [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoDlq ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2026-02-19T08:19:38.227275Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488085473681952:2201];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:38.227772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:38.611414Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:38.664923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:38.665099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:38.738508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:38.765136Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:38.947019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:38.992368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:38.992393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:38.992400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:38.992510Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:39.217323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:39.225443Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:39.584739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:19:39.690516Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7608488089768650224:2306], Recipient [1:7608488085473682263:2206]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:39530" } 2026-02-19T08:19:39.690555Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event 
TEvConsole::TEvCreateTenantRequest 2026-02-19T08:19:39.690574Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:39.690584Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:39.690724Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:39530" 2026-02-19T08:19:39.690913Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1771489179689642) 2026-02-19T08:19:39.746585Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1771489179689642 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-02-19T08:19:39.746837Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-02-19T08:19:39.756854Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-02-19T08:19:39.758101Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489179689642&action=1" } } } 2026-02-19T08:19:39.758264Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:39.758334Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:39.758492Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:39.759026Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-02-19T08:19:39.759174Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-02-19T08:19:39.765151Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488089768650234:2307], Recipient [1:7608488085473682263:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489179689642&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" } 
2026-02-19T08:19:39.765183Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:39.765358Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489179689642&action=1" } } 2026-02-19T08:19:39.767634Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-02-19T08:19:39.767683Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:39.767758Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7608488089768650229:2206], Recipient [1:7608488085473682263:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:39.767785Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:39.767798Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:39.767806Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:39.767893Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-02-19T08:19:39.767911Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-02-19T08:19:39.767989Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-02-19T08:19:39.771162Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:39.771197Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:39.771205Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:39.771212Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:39.771277Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-02-19T08:19:39.771297Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1771489179689642 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:39.773759Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:39.801663Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:39.805094Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-02-19T08:19:39.805109Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-02-19T08:19:39.831945Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: 
NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" DatabaseName: "Root" PeerName: "ipv6:[::1]:39530" 2026-02-19T08:19:39.832260Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488089768650293:2310], Recipient [1:7608488085473682263:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489179689642&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" } 2026-02-19T08:19:39.832282Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:39.832446Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGet ... tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2026-02-19T08:19:40.778315Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found - using supplied 72075186224037891 2026-02-19T08:19:40.778359Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found - using supplied 72075186224037892 2026-02-19T08:19:40.778436Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found - using supplied 72075186224037895 2026-02-19T08:19:40.778492Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found - using supplied 72075186224037889 2026-02-19T08:19:40.778526Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found - using supplied 72075186224037896 2026-02-19T08:19:40.779356Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715663 2026-02-19T08:19:40.779375Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-02-19T08:19:40.779497Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:40.779640Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7608488094063618196:2206], Recipient [1:7608488085473682263:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:40.779659Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:40.779673Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:40.779683Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: 
TTxProcessor(tenants) starts new tx 2026-02-19T08:19:40.779715Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-02-19T08:19:40.779767Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1771489180731307 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-02-19T08:19:40.779834Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1771489180731307 issue=AccessDenied: Access denied for request 2026-02-19T08:19:40.781027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-02-19T08:19:40.781083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2026-02-19T08:19:40.781123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-02-19T08:19:40.781167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-02-19T08:19:40.781234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-02-19T08:19:40.782272Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-02-19T08:19:40.784293Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-02-19T08:19:40.784384Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-02-19T08:19:40.784409Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:40.785145Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488085473682151:2205], Recipient [1:7608488085473682263:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:40.785169Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:40.785193Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:40.785207Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:40.785233Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-02-19T08:19:40.785261Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1771489180731307 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-02-19T08:19:40.788478Z node 1 :CMS_TENANTS DEBUG: 
console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:40.788533Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:40.788565Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:40.788715Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:40.789242Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2026-02-19T08:19:40.789363Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2026-02-19T08:19:40.793127Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2026-02-19T08:19:40.793245Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7608488094063618282:2206], Recipient [1:7608488085473682263:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-02-19T08:19:40.793293Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-02-19T08:19:40.793316Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:40.793327Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:40.793360Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-02-19T08:19:40.793378Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-02-19T08:19:40.801208Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:40.801286Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:40.801295Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:40.801302Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:40.801504Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1771489180731307 2026-02-19T08:19:40.801532Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1771489180731307 issue=AccessDenied: Access denied for request 2026-02-19T08:19:40.801548Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1771489180731307 issue=AccessDenied: Access denied for request 2026-02-19T08:19:40.801569Z node 1 
:CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-02-19T08:19:40.801671Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1771489180731307 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-02-19T08:19:40.805153Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488094063618298:2375], Recipient [1:7608488085473682263:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489180731307&action=2" } UserToken: "" } 2026-02-19T08:19:40.805178Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:40.809016Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489180731307&action=2" } } 2026-02-19T08:19:40.813655Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-02-19T08:19:40.813725Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:40.863320Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488094063618306:2377], Recipient [1:7608488085473682263:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489180731307&action=2" } UserToken: "" } 2026-02-19T08:19:40.863349Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:40.863490Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489180731307&action=2" ready: true status: SUCCESS } } 2026-02-19T08:19:40.866728Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-02-19T08:19:40.866950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:19:40.929560Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 31715, msgbus: 13767 2026-02-19T08:13:56.047979Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486616542953994:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:56.048065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:56.091854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:13:56.357424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:13:56.402990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:56.403096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:56.473499Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:56.474553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:56.613998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:56.614024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:56.614038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:56.614133Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:56.624687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:13:56.828385Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486616542954202:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:13:56.828466Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486616542954970:2725] HANDLE EvNavigateScheme dc-1 2026-02-19T08:13:56.829027Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486616542954970:2725] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:56.854732Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486616542954970:2725] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:13:56.859258Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486616542954970:2725] Handle TEvDescribeSchemeResult Forward to# [1:7608486616542954969:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 
ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:13:56.861000Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486616542954202:2143] Handle TEvProposeTransaction 2026-02-19T08:13:56.861026Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486616542954202:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:13:56.861081Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486616542954202:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608486616542954975:2730] 2026-02-19T08:13:56.957433Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486616542954975:2730] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:13:56.957534Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486616542954975:2730] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:13:56.957552Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486616542954975:2730] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:13:56.957638Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486616542954975:2730] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:13:56.957978Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486616542954975:2730] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:56.958115Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486616542954975:2730] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:13:56.958222Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486616542954975:2730] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:13:56.958371Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486616542954975:2730] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:13:56.959085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:56.961225Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486616542954975:2730] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:13:56.961295Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486616542954975:2730] txid# 281474976710657 SEND to# [1:7608486616542954974:2729] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:13:56.976286Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486616542954202:2143] Handle TEvProposeTransaction 2026-02-19T08:13:56.976312Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486616542954202:2143] TxId# 281474976710658 ProcessProposeTransaction 2026-02-19T08:13:56.976355Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486616542954202:2143] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7608486616542955014:2766] 2026-02-19T08:13:56.978130Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486616542955014:2766] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:13:56.978189Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486616542955014:2766] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:13:56.978204Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486616542955014:2766] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:13:56.978261Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486616542955014:2766] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:13:56.978639Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486616542955014:2766] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:56.978751Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486616542955014:2766] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:13:56.978841Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486616542955014:2766] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-02-19T08:13:56.978997Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486616542955014:2766] txid# 281474976710658 HANDLE EvClientConnected 2026-02-19T08:13:56.979548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:13:56.984390Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486616542955014:2766] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-02-19T08:13:56.984437Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486616542955014:2766] txid# 281474976710658 SEND to# [1:7608486616542955013:2765] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-02-19T08:13:57.072383Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NA ... cCI6MTc3MTUzMjM3MiwiaWF0IjoxNzcxNDg5MTcyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60896" 2026-02-19T08:19:33.524714Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488064139691439:3033] txid# 281474976710666 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:33.524736Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488064139691439:3033] txid# 281474976710666 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:19:33.524903Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488064139691439:3033] txid# 281474976710666 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:33.524970Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488064139691439:3033] txid# 281474976710666 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:33.525017Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488064139691439:3033] txid# 281474976710666 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:33.525291Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488064139691439:3033] txid# 281474976710666 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:33.525318Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488064139691439:3033] txid# 281474976710666, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:19:33.525409Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488064139691439:3033] txid# 281474976710666, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:19:33.525437Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488064139691439:3033] txid# 281474976710666 SEND to# [59:7608488064139691438:2361] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:33.525893Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZmQ4OTQyMGUtZTlkMDIwZmMtZDc2YjViM2MtYmYxMDg1NjY=, ActorId: [59:7608488059844724104:2361], ActorState: ExecuteState, LegacyTraceId: 01khtfq7zp31945gtcd2h01hpg, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:19:33.528219Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488034074918947:2112] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:33.528250Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488034074918947:2112] TxId# 281474976710667 ProcessProposeKqpTransaction 2026-02-19T08:19:35.905552Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488034074918947:2112] Handle TEvProposeTransaction 2026-02-19T08:19:35.905585Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488034074918947:2112] TxId# 281474976710668 ProcessProposeTransaction 2026-02-19T08:19:35.905631Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488034074918947:2112] Cookie# 0 userReqId# "" txid# 281474976710668 SEND to# [59:7608488072729626080:3054] 2026-02-19T08:19:35.907651Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488072729626080:3054] txid# 281474976710668 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM3MiwiaWF0IjoxNzcxNDg5MTcyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.hQAdaVlKqe32tm0KpDCfJ2-lBPb3h__bma8Pz_56b5f5huoocgs5Lyc95UUH-Dz2Fb_gkPMPl1IscZcwGQnWcflDsVCpJFR_UQ4quhP5pSSJqrHKt5UOD1zkvHbUP0kG7qILk32F1VAYPu54_laCSfWGGLXkFnG-ew_MJk21rKAWDJp1U32w0RFP83k1boGlyUE9lZAzi6-kU2_Za0U4-oUOa84Ig04oQrH-AltohmABtaKlN2o_Li_oq0eAX7yVtd3gDntPvUDGHKZd1pqdf2PUm-MAMhpKhCXpPKTKIYUYX3jYoCjiskBdxYpOKpiiLE78oxGg_NvVrMdDGMi_Bw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM3MiwiaWF0IjoxNzcxNDg5MTcyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:59592" 2026-02-19T08:19:35.907739Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488072729626080:3054] txid# 281474976710668 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:35.907760Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488072729626080:3054] txid# 281474976710668 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:19:35.907916Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488072729626080:3054] txid# 281474976710668 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:35.907980Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488072729626080:3054] txid# 281474976710668 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:35.908028Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488072729626080:3054] txid# 281474976710668 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:35.908312Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488072729626080:3054] txid# 281474976710668 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:35.908339Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488072729626080:3054] txid# 281474976710668, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:19:35.908429Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488072729626080:3054] txid# 281474976710668, 
issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:19:35.908456Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488072729626080:3054] txid# 281474976710668 SEND to# [59:7608488072729626079:2361] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:35.908889Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZmQ4OTQyMGUtZTlkMDIwZmMtZDc2YjViM2MtYmYxMDg1NjY=, ActorId: [59:7608488059844724104:2361], ActorState: ExecuteState, LegacyTraceId: 01khtfqaa53m98hn6gmw51hvdj, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:35.909117Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488034074918947:2112] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:35.909143Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488034074918947:2112] TxId# 281474976710669 ProcessProposeKqpTransaction 2026-02-19T08:19:42.092344Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:19:42.092376Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:42.583219Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488034074918947:2112] Handle TEvProposeTransaction 2026-02-19T08:19:42.583267Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488034074918947:2112] TxId# 281474976710670 ProcessProposeTransaction 2026-02-19T08:19:42.583309Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488034074918947:2112] Cookie# 0 userReqId# "" txid# 281474976710670 SEND to# [59:7608488102794397279:3113] 2026-02-19T08:19:42.585538Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488102794397279:3113] txid# 281474976710670 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM3MiwiaWF0IjoxNzcxNDg5MTcyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.hQAdaVlKqe32tm0KpDCfJ2-lBPb3h__bma8Pz_56b5f5huoocgs5Lyc95UUH-Dz2Fb_gkPMPl1IscZcwGQnWcflDsVCpJFR_UQ4quhP5pSSJqrHKt5UOD1zkvHbUP0kG7qILk32F1VAYPu54_laCSfWGGLXkFnG-ew_MJk21rKAWDJp1U32w0RFP83k1boGlyUE9lZAzi6-kU2_Za0U4-oUOa84Ig04oQrH-AltohmABtaKlN2o_Li_oq0eAX7yVtd3gDntPvUDGHKZd1pqdf2PUm-MAMhpKhCXpPKTKIYUYX3jYoCjiskBdxYpOKpiiLE78oxGg_NvVrMdDGMi_Bw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM3MiwiaWF0IjoxNzcxNDg5MTcyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:59592" 2026-02-19T08:19:42.585612Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488102794397279:3113] txid# 281474976710670 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:42.585631Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488102794397279:3113] txid# 281474976710670 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:19:42.585764Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488102794397279:3113] txid# 
281474976710670 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:42.585819Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488102794397279:3113] txid# 281474976710670 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:42.585857Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488102794397279:3113] txid# 281474976710670 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:42.586123Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488102794397279:3113] txid# 281474976710670 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:42.586146Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488102794397279:3113] txid# 281474976710670, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:19:42.586226Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488102794397279:3113] txid# 281474976710670, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:19:42.586250Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488102794397279:3113] txid# 281474976710670 SEND to# [59:7608488102794397278:2361] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:42.586667Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZmQ4OTQyMGUtZTlkMDIwZmMtZDc2YjViM2MtYmYxMDg1NjY=, ActorId: [59:7608488059844724104:2361], ActorState: ExecuteState, LegacyTraceId: 01khtfqgv7d3hsjsp42g8qhdj7, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:19:42.587266Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488034074918947:2112] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:42.587283Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488034074918947:2112] TxId# 281474976710671 ProcessProposeKqpTransaction >> TBackupCollectionTests::DropCollectionRollbackOnFailure [GOOD] >> TBackupCollectionTests::DropCollectionValidationCases >> TGRpcCmsTest::SimpleTenantsTest |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_s3_writer/ydb-core-tx-replication-service-ut_s3_writer |91.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_s3_writer/ydb-core-tx-replication-service-ut_s3_writer |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_s3_writer/ydb-core-tx-replication-service-ut_s3_writer >> KqpStats::MultiTxStatsFullExpYql >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |91.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2026-02-19T08:19:16.884103Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487993023874090:2149];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:16.884200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:16.932222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:17.282315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:17.336971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:17.340047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:17.352323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:17.481658Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:17.543688Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:17.904607Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-02-19T08:19:17.978291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:17.978316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:17.978324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:17.978417Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:18.310493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:21.196275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498711830:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.196389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.196745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498711840:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.196806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.416302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:21.609295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498711998:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.609362Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.609568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712000:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.609601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.631793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-02-19T08:19:21.751816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712100:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.751944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.752175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712103:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.752217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.756026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712106:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.756207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.756776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712129:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.756824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712130:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.756881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712131:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.756887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.757026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712133:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.757066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712132:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.757094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712135:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.757127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712134:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.757196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712136:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.762365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712162:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.762455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712165:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.762544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488014498712170:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:21.762690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed ... Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2287: Extended key access sampling at datashard: 72075186224037888 2026-02-19T08:19:41.590488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 0 row count 0 buckets 0 ready 1 2026-02-19T08:19:41.590542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram Process detailed partition statistics at tablet 72057594046644480 from datashard 72075186224037888 from follower ID 0 for pathId [OwnerId: 72057594046644480, LocalPathId: 38], state Ready, data size buckets 0, key access buckets 100 2026-02-19T08:19:41.590619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:490: TTxPartitionHistogram Want to Split by load: split by load (rowCount: 0, minRowCount: 0, shardSize: 0, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 82782, cpuUsageThreshold: 50000) tablet 72075186224037888 2026-02-19T08:19:41.591384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:555: TTxPartitionHistogram Propose Split by load: split by load (rowCount: 0, minRowCount: 0, shardSize: 0, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 82782, cpuUsageThreshold: 50000) tablet 72075186224037888 request Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000F\234\266y\000\000\000\200" } SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976715693 TabletId: 72057594046644480 2026-02-19T08:19:41.591622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000F\234\266y\000\000\000\200" } SchemeshardId: 72057594046644480 2026-02-19T08:19:41.592242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000F\234\266y\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000F\234\266y\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000F\234\266y\000\000\000\200" } SchemeshardId: 72057594046644480 2026-02-19T08:19:41.592281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:41.596771Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715693:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-02-19T08:19:41.604353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:19:41.604632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:19:41.604697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715693:0 CreateParts -> ConfigureParts 2026-02-19T08:19:41.608972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-02-19T08:19:41.615308Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:7608488100398131740:9784] 2026-02-19T08:19:41.619078Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7608488100398131741:9785] 2026-02-19T08:19:41.648365Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-02-19T08:19:41.648459Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-02-19T08:19:41.648603Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:19:41.648624Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-02-19T08:19:41.648650Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:19:41.648761Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:19:41.660069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037890 2026-02-19T08:19:41.660357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037889 2026-02-19T08:19:41.660389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715693:0 ConfigureParts -> TransferData 2026-02-19T08:19:41.662561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:19:41.687684Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-02-19T08:19:41.687821Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 
2026-02-19T08:19:41.687878Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 2026-02-19T08:19:41.687887Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2026-02-19T08:19:41.687906Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037889 2026-02-19T08:19:41.687962Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:19:41.687998Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 2026-02-19T08:19:41.688018Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037890 2026-02-19T08:19:41.688182Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:19:41.688229Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-02-19T08:19:41.691300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715693 TabletId: 72075186224037888 2026-02-19T08:19:41.691662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715693:0 TransferData -> NotifyPartitioningChanged 2026-02-19T08:19:41.693514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:19:41.693812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:19:41.693893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:19:41.694891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715693 2026-02-19T08:19:41.694936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715693 2026-02-19T08:19:41.694953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-02-19T08:19:41.704992Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-02-19T08:19:41.705011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-02-19T08:19:41.705128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-02-19T08:19:41.705155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part 
operation is done id#281474976715693:0 progress is 1/1 2026-02-19T08:19:41.705206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-02-19T08:19:41.707987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2026-02-19T08:19:41.710509Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:19:41.712714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:19:41.719762Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:19:41.720557Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:19:41.721301Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:19:41.721413Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:19:41.723302Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1114: TxId: 281474976722964, task: 1, CA Id [1:7608488100398132083:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 Table has 2 shards >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |91.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> TBackupCollectionTests::DropCollectionValidationCases [GOOD] >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> KqpStats::SysViewClientLost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 64956, msgbus: 7517 2026-02-19T08:14:14.998852Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486694069592781:2204];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:14.999371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:15.228910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:15.264487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-02-19T08:14:15.264619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:15.316610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:15.334466Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:15.452792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:15.452840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:15.452852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:15.452976Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:15.472301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:15.704908Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486698364560151:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:15.704984Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486698364560904:2710] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:15.705465Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486698364560904:2710] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:15.744387Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486698364560904:2710] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:14:15.750548Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486698364560904:2710] Handle TEvDescribeSchemeResult Forward to# [1:7608486698364560903:2709] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 
MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:15.752271Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486698364560151:2143] Handle TEvProposeTransaction 2026-02-19T08:14:15.752305Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486698364560151:2143] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:14:15.752378Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486698364560151:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7608486698364560909:2715] 2026-02-19T08:14:15.838345Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486698364560909:2715] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:15.838465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486698364560909:2715] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:15.838487Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486698364560909:2715] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:15.838570Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486698364560909:2715] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:15.838883Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486698364560909:2715] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:15.839021Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486698364560909:2715] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:14:15.839105Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486698364560909:2715] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:14:15.839248Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486698364560909:2715] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:14:15.839992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:15.843356Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486698364560909:2715] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 
2026-02-19T08:14:15.843405Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486698364560909:2715] txid# 281474976715657 SEND to# [1:7608486698364560908:2714] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:14:15.855689Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486698364560151:2143] Handle TEvProposeTransaction 2026-02-19T08:14:15.855724Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486698364560151:2143] TxId# 281474976715658 ProcessProposeTransaction 2026-02-19T08:14:15.855752Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486698364560151:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7608486698364560948:2751] 2026-02-19T08:14:15.857260Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486698364560948:2751] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:15.857311Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486698364560948:2751] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:15.857323Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486698364560948:2751] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:15.857377Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486698364560948:2751] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:15.857702Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486698364560948:2751] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:15.857862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486698364560948:2751] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:14:15.857955Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486698364560948:2751] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-02-19T08:14:15.858090Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486698364560948:2751] txid# 281474976715658 HANDLE EvClientConnected 2026-02-19T08:14:15.858708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:14:15.861335Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486698364560948:2751] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-02-19T08:14:15.861375Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486698364560948:2751] txid# 281474976715658 
SEND to# [1:7608486698364560947:2750] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-02-19T08:14:15.892284Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486698364560151:2143] Handle TEvProposeTransaction 2026-02-19T08:14:15.892319Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486698364560151:2143] TxId# 281474976715659 ProcessProposeTransaction 2026-02-19T08:14:15.892362Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1 ... _PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488035696162914:2141] TxId# 281474976710664 ProcessProposeKqpTransaction 2026-02-19T08:19:33.038401Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488035696162914:2141] Handle TEvProposeTransaction 2026-02-19T08:19:33.038440Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488035696162914:2141] TxId# 281474976710665 ProcessProposeTransaction 2026-02-19T08:19:33.038492Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488035696162914:2141] Cookie# 0 userReqId# "" txid# 281474976710665 SEND to# [59:7608488065760935213:3004] 2026-02-19T08:19:33.040285Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488065760935213:3004] txid# 281474976710665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:42732" 2026-02-19T08:19:33.040343Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488065760935213:3004] txid# 281474976710665 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:33.040357Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488065760935213:3004] txid# 281474976710665 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-02-19T08:19:33.040479Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488065760935213:3004] txid# 281474976710665 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:33.040511Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488065760935213:3004] txid# 281474976710665 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:33.040544Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488065760935213:3004] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:33.040730Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488065760935213:3004] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:33.040746Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488065760935213:3004] txid# 281474976710665, Access denied for ordinaryuser@builtin, attempt to manage user 2026-02-19T08:19:33.040811Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488065760935213:3004] txid# 281474976710665, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-02-19T08:19:33.040830Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488065760935213:3004] txid# 281474976710665 SEND to# 
[59:7608488065760935212:2351] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:33.041215Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZjM5M2E2YjQtYmI3NTA2MDUtZGMzMjA2MTItMmIzYzQ3ZjE=, ActorId: [59:7608488057171000574:2351], ActorState: ExecuteState, LegacyTraceId: 01khtfq7gm0k9z07hg5wptdh8p, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:33.041556Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488035696162914:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:33.041581Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488035696162914:2141] TxId# 281474976710666 ProcessProposeKqpTransaction 2026-02-19T08:19:37.077808Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488035696162914:2141] Handle TEvProposeTransaction 2026-02-19T08:19:37.077855Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488035696162914:2141] TxId# 281474976710667 ProcessProposeTransaction 2026-02-19T08:19:37.077901Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488035696162914:2141] Cookie# 0 userReqId# "" txid# 281474976710667 SEND to# [59:7608488082940804478:3041] 2026-02-19T08:19:37.080192Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488082940804478:3041] txid# 281474976710667 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:42732" 2026-02-19T08:19:37.080270Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488082940804478:3041] txid# 281474976710667 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:37.080291Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488082940804478:3041] txid# 281474976710667 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-02-19T08:19:37.080443Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488082940804478:3041] txid# 281474976710667 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:37.080490Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488082940804478:3041] txid# 281474976710667 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:37.080541Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488082940804478:3041] txid# 281474976710667 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:37.080829Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488082940804478:3041] txid# 281474976710667 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:37.080873Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488082940804478:3041] txid# 281474976710667, Access denied for ordinaryuser@builtin, attempt to manage user 2026-02-19T08:19:37.080975Z node 59 
:TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488082940804478:3041] txid# 281474976710667, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-02-19T08:19:37.081005Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488082940804478:3041] txid# 281474976710667 SEND to# [59:7608488082940804477:2351] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:37.081543Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZjM5M2E2YjQtYmI3NTA2MDUtZGMzMjA2MTItMmIzYzQ3ZjE=, ActorId: [59:7608488057171000574:2351], ActorState: ExecuteState, LegacyTraceId: 01khtfqbe25degtc4tyb8a2t9n, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:37.082190Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488035696162914:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:37.082216Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488035696162914:2141] TxId# 281474976710668 ProcessProposeKqpTransaction 2026-02-19T08:19:41.329284Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:19:41.329315Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:44.078238Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488035696162914:2141] Handle TEvProposeTransaction 2026-02-19T08:19:44.078276Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488035696162914:2141] TxId# 281474976710669 ProcessProposeTransaction 2026-02-19T08:19:44.078334Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488035696162914:2141] Cookie# 0 userReqId# "" txid# 281474976710669 SEND to# [59:7608488113005575679:3100] 2026-02-19T08:19:44.080539Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488113005575679:3100] txid# 281474976710669 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:42732" 2026-02-19T08:19:44.080632Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488113005575679:3100] txid# 281474976710669 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:44.080652Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488113005575679:3100] txid# 281474976710669 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-02-19T08:19:44.080811Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488113005575679:3100] txid# 281474976710669 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:44.080857Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488113005575679:3100] txid# 281474976710669 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:44.080907Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:1889: Actor# [59:7608488113005575679:3100] txid# 281474976710669 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:44.081185Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488113005575679:3100] txid# 281474976710669 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:44.081211Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488113005575679:3100] txid# 281474976710669, Access denied for ordinaryuser@builtin, attempt to manage user 2026-02-19T08:19:44.081310Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488113005575679:3100] txid# 281474976710669, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-02-19T08:19:44.081337Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488113005575679:3100] txid# 281474976710669 SEND to# [59:7608488113005575678:2351] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:44.082151Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=ZjM5M2E2YjQtYmI3NTA2MDUtZGMzMjA2MTItMmIzYzQ3ZjE=, ActorId: [59:7608488057171000574:2351], ActorState: ExecuteState, LegacyTraceId: 01khtfqj9h38r35b4ykrdfbwr2, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:44.082440Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488035696162914:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:44.082468Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488035696162914:2141] TxId# 281474976710670 ProcessProposeKqpTransaction >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn >> TGRpcCmsTest::DisabledTxTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 >> KqpStats::DataQueryWithEffects+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2026-02-19T08:19:40.786917Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488095011653045:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:40.787665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:41.137805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:41.335277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:41.335404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:41.360491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:41.475120Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:41.477580Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:41.738332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:41.738357Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:41.738364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:41.738449Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:41.804249Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:42.119020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:42.231696Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7608488103601588663:2306], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:54036" } 2026-02-19T08:19:42.231750Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-02-19T08:19:42.231776Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:42.231788Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:42.231911Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:54036" 2026-02-19T08:19:42.232085Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1771489182229423) 2026-02-19T08:19:42.285554Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1771489182229423 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-02-19T08:19:42.302099Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-02-19T08:19:42.318286Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-02-19T08:19:42.319525Z node 1 :CMS_TENANTS TRACE: 
console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182229423&action=1" } } } 2026-02-19T08:19:42.319689Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:42.319762Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:42.319902Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:42.320367Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-02-19T08:19:42.320493Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-02-19T08:19:42.338023Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-02-19T08:19:42.338079Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:42.338169Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7608488103601588668:2210], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:42.338196Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:42.338211Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:42.338219Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:42.338272Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-02-19T08:19:42.338299Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-02-19T08:19:42.338364Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-02-19T08:19:42.338739Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488103601588673:2307], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182229423&action=1" } UserToken: "" } 2026-02-19T08:19:42.338769Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:42.338984Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182229423&action=1" } } 2026-02-19T08:19:42.355108Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:42.355143Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:42.355179Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:42.355191Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:42.355277Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-02-19T08:19:42.355301Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1771489182229423 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:42.359535Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:42.379474Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:42.379995Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-02-19T08:19:42.380007Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-02-19T08:19:42.387499Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:54036" 2026-02-19T08:19:42.388922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:19:42.392237Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-02-19T08:19:42.392312Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2026-02-19T08:19:42.398129Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488103601588734:2310], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182229423&action=1" } UserToken: "" } 2026-02-19T08:19:42.398154Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:42.398294Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send 
TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182229423&action=1" ... user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2026-02-19T08:19:42.505777Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7608488103601588771:2210], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-02-19T08:19:42.505796Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2026-02-19T08:19:42.505807Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3652: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2026-02-19T08:19:42.510493Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488103601588864:2317], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182483875&action=2" } UserToken: "" } 2026-02-19T08:19:42.510529Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:42.511180Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182483875&action=2" } } 2026-02-19T08:19:42.523096Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2026-02-19T08:19:42.546242Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-02-19T08:19:42.546341Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2026-02-19T08:19:42.546379Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-02-19T08:19:42.546465Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2026-02-19T08:19:42.546615Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2026-02-19T08:19:42.546644Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2026-02-19T08:19:42.546671Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-02-19T08:19:42.546706Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2026-02-19T08:19:42.546742Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2026-02-19T08:19:42.557654Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2026-02-19T08:19:42.557677Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: 
TSubdomainManip(/Root/users/user-1) done 2026-02-19T08:19:42.557732Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:42.557841Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7608488103601588849:2210], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:42.557858Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:42.557874Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:42.557887Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:42.557917Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-02-19T08:19:42.557941Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1771489182483875 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:42.557993Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1771489182483875 issue= 2026-02-19T08:19:42.567056Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-02-19T08:19:42.567137Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-02-19T08:19:42.567150Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:42.567593Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488099306620667:2209], Recipient [1:7608488099306620797:2210]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:42.567618Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:42.567647Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:42.567657Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:42.567687Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-02-19T08:19:42.567708Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1771489182483875 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:42.570841Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488103601588967:2319], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182483875&action=2" } UserToken: "" } 2026-02-19T08:19:42.570861Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:42.571048Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send 
TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182483875&action=2" } } 2026-02-19T08:19:42.586582Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:42.586650Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:42.586686Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:42.586840Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:42.587510Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-02-19T08:19:42.587589Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-02-19T08:19:42.596218Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-02-19T08:19:42.596293Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7608488103601588982:2210], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-02-19T08:19:42.596318Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-02-19T08:19:42.596328Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:42.596335Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:42.596378Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-02-19T08:19:42.596397Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-02-19T08:19:42.601513Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:42.601547Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:42.601554Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:42.601560Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:42.601619Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1771489182483875 2026-02-19T08:19:42.601629Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1771489182483875 issue= 2026-02-19T08:19:42.601641Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1771489182483875 issue= 2026-02-19T08:19:42.601656Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-02-19T08:19:42.601777Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1771489182483875 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:42.609204Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-02-19T08:19:42.609307Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:42.627582Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488103601588999:2322], Recipient [1:7608488099306620797:2210]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182483875&action=2" } UserToken: "" } 2026-02-19T08:19:42.627609Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:42.627796Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489182483875&action=2" ready: true status: SUCCESS } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2026-02-19T08:19:19.223779Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488006350942722:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:19.223891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:19.269830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:19.657823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:19.694033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:19.695263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:19.743092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:19.804548Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:20.068212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:20.150147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-02-19T08:19:20.150176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:20.150183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:20.150254Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:20.234099Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:20.532307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:23.390424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488023530813138:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.390543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.397193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488023530813148:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.397310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.644125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:23.919419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488023530813302:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.919513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.920071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488023530813305:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.920153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.971729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-02-19T08:19:24.225253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488006350942722:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:24.225329Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:24.388641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027825780738:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.389923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027825780735:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.390012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027825780739:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.390058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027825780725:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.390442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.400454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027825780747:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.408275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027825780749:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.411633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2026-02-19T08:19:24.411828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715662:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:24.411847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2026-02-19T08:19:24.411922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715662:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:24.411953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715662:2, at schemeshard: 72057594046644480 2026-02-19T08:19:24.412040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715662:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:24.412097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715662:3, path# /Root/.metadata/workload_manager/pools/default 2026-02-19T08:19:24.412362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715662:3 Waiting -> Propose 2026-02-19T08:19:24.412609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715662:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:24.412646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:24.427219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715663:0, path# /Root/.metadata/workload_manager/pools/default 2026-02-19T08:19:24.433057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.434896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027825780803:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.434954Z node 1 :KQP_WORKLOAD_SERVICE WARN: sc ... .160071Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:7608488113725181878:2378], SessionActorId: [1:7608488027825780718:2378], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7608488027825780718:2378]. 2026-02-19T08:19:44.160235Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=1&id=OTgzNjY5MDYtMmE3ZDIwMzAtMjFlNWFhZjgtYWI1NzljZjY=, ActorId: [1:7608488027825780718:2378], ActorState: ExecuteState, LegacyTraceId: 01khtfqjc7agyb3zbrvtksxyff, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7608488113725181880:2378] from: [1:7608488113725181878:2378] trace_id# 2026-02-19T08:19:44.160315Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:7608488113725181880:2378] TxId: 281474976720602. Ctx: { TraceId: 01khtfqjc7agyb3zbrvtksxyff, Database: , SessionId: ydb://session/3?node_id=1&id=OTgzNjY5MDYtMmE3ZDIwMzAtMjFlNWFhZjgtYWI1NzljZjY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-02-19T08:19:44.163459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:19:44.163874Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=OTgzNjY5MDYtMmE3ZDIwMzAtMjFlNWFhZjgtYWI1NzljZjY=, ActorId: [1:7608488027825780718:2378], ActorState: ExecuteState, LegacyTraceId: 01khtfqjc7agyb3zbrvtksxyff, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-02-19T08:19:44.169653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:19:44.169764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:19:44.172671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710693 2026-02-19T08:19:44.172717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710693 2026-02-19T08:19:44.172733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-02-19T08:19:44.184412Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2026-02-19T08:19:44.186511Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:3155: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2026-02-19T08:19:44.187731Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:836: SelfId: [1:7608488113725181934:2380], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7608488027825780720:2380]Got WRONG SHARD STATE for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7608488113725181934:2380].{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2026-02-19T08:19:44.187829Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:7608488113725181903:2380], SessionActorId: [1:7608488027825780720:2380], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[1:7608488027825780720:2380]. 2026-02-19T08:19:44.188008Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=1&id=ZmEwZDlmZjYtYjBjMTViOWQtZTk3NTgxOS0yMjQ2Y2E4ZQ==, ActorId: [1:7608488027825780720:2380], ActorState: ExecuteState, LegacyTraceId: 01khtfqjcg5pceksrn7jtwt2kd, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [1:7608488113725181904:2380] from: [1:7608488113725181903:2380] trace_id# 2026-02-19T08:19:44.188086Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:7608488113725181904:2380] TxId: 281474976720605. Ctx: { TraceId: 01khtfqjcg5pceksrn7jtwt2kd, Database: , SessionId: ydb://session/3?node_id=1&id=ZmEwZDlmZjYtYjBjMTViOWQtZTk3NTgxOS0yMjQ2Y2E4ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-02-19T08:19:44.188305Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=ZmEwZDlmZjYtYjBjMTViOWQtZTk3NTgxOS0yMjQ2Y2E4ZQ==, ActorId: [1:7608488027825780720:2380], ActorState: ExecuteState, LegacyTraceId: 01khtfqjcg5pceksrn7jtwt2kd, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/Foo`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-02-19T08:19:44.189368Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-02-19T08:19:44.190361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-02-19T08:19:44.190439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-02-19T08:19:44.190462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-02-19T08:19:44.190506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-02-19T08:19:44.193147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 UNAVAILABLE:
:4:9: Warning: Symbol $timestamp is not used, code: 4527
:3:9: Warning: Symbol $version is not used, code: 4527
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 2026-02-19T08:19:44.199682Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:19:44.206435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:19:44.217308Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:19:44.217997Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:19:44.220467Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:19:44.220573Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:19:44.260260Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1318: SelfId: [1:7608488113725181889:2375], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7608488027825780714:2375]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-02-19T08:19:44.262893Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:7608488113725181869:2375], SessionActorId: [1:7608488027825780714:2375], statusCode=UNAVAILABLE. Issue=
: Error: Kikimr cluster or one of its subsystems was unavailable. Error writing to table `/Root/Foo`: can't deliver message to tablet 72075186224037888., code: 2005 . sessionActorId=[1:7608488027825780714:2375]. 2026-02-19T08:19:44.266686Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=1&id=NDliOGNhMTAtNjMxMWU1MDItNjAwOTIwNDgtNTMzZTRhMjY=, ActorId: [1:7608488027825780714:2375], ActorState: ExecuteState, LegacyTraceId: 01khtfqjc73m61nk978tyqvra6, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [1:7608488113725181870:2375] from: [1:7608488113725181869:2375] trace_id# UNAVAILABLE:
:4:9: Warning: Symbol $timestamp is not used, code: 4527
:3:9: Warning: Symbol $version is not used, code: 4527
: Error: Kikimr cluster or one of its subsystems was unavailable. Error writing to table `/Root/Foo`: can't deliver message to tablet 72075186224037888., code: 2005 2026-02-19T08:19:44.266770Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:7608488113725181870:2375] TxId: 281474976720600. Ctx: { TraceId: 01khtfqjc73m61nk978tyqvra6, Database: , SessionId: ydb://session/3?node_id=1&id=NDliOGNhMTAtNjMxMWU1MDItNjAwOTIwNDgtNTMzZTRhMjY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Kikimr cluster or one of its subsystems was unavailable. Error writing to table `/Root/Foo`: can't deliver message to tablet 72075186224037888., code: 2005 } trace_id# 2026-02-19T08:19:44.266990Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=NDliOGNhMTAtNjMxMWU1MDItNjAwOTIwNDgtNTMzZTRhMjY=, ActorId: [1:7608488027825780714:2375], ActorState: ExecuteState, LegacyTraceId: 01khtfqjc73m61nk978tyqvra6, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Kikimr cluster or one of its subsystems was unavailable. Error writing to table `/Root/Foo`: can\'t deliver message to tablet 72075186224037888." issue_code: 2005 severity: 1 } trace_id# Table has 2 shards >> TGRpcCmsTest::AuthTokenTest [GOOD] >> KqpExplain::Explain >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq [GOOD] >> IndexBuildTest::RejectsCancel >> TGRpcCmsTest::DescribeOptionsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2026-02-19T08:19:43.085614Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488110334885958:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:43.086186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:43.351714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:43.387942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:43.388075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:43.400918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:43.488768Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:43.529840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:43.619174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:43.619197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:43.619204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:43.619387Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:44.101923Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:44.157027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:44.328441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) 2026-02-19T08:19:44.423577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: 2026-02-19T08:15:18.476539Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486971090598381:2154];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:18.476825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:18.865946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:18.880766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:18.885079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:18.949943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:19.002566Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:19.004868Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486971090598254:2081] 1771488918463140 != 1771488918463143 2026-02-19T08:15:19.117383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:19.117405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:15:19.117411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:19.117502Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:19.137056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-02-19T08:15:19.478987Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:19.621397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:19.631400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:15:19.687544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:19.865369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:20.042987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:20.151800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.722688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486988270469312:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.722795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.723387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486988270469322:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.723463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.126807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.170374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.222524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.298993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.340339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.412097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.475252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486971090598381:2154];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:23.475477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:15:23.490401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.602422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:23.758577Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486992565437491:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.758689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.759616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486992565437496:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.759670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486992565437497:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.759797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:23.765122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:23.786896Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486992565437500:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:15:23.889961Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486992565437551:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:26.131244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... iptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:18:07.920825Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:07.934122Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608487694592298451:2081] 1771489087601665 != 1771489087601668 2026-02-19T08:18:07.987067Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:07.989117Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:07.989196Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:07.993567Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:08.309967Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:08.310594Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:08.310606Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:18:08.310615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:08.310702Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:08.657140Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:09.222465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:09.249620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:09.349398Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:09.537251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:09.684820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:12.642213Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608487694592298547:2136];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:12.642292Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:14.109054Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487724657071408:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.109143Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.109637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487724657071418:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.109699Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:14.284838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.358439Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.414534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.486361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.598943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.689135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.781231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.882749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.021963Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487728952039614:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:15.022087Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:15.022600Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487728952039619:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:15.022668Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487728952039620:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:15.022812Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:15.027094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:15.049295Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487728952039623:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:18:15.103100Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487728952039676:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:18.165837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:22.985089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:18:22.985133Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveOperation ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2026-02-19T08:19:44.713402Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488112562519323:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:44.713470Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:45.091369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:45.126554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:45.126671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:45.147894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:45.191725Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:45.362378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:45.414434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:45.414463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:45.414470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-02-19T08:19:45.414552Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:45.706195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:45.753874Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:45.958329Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:131: TTxProcessor(tenants) is now locking 2026-02-19T08:19:45.958383Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:143: TTxProcessor(tenants) is now locked by parent 2026-02-19T08:19:45.962320Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:102: TTxProcessor(tenants) is now active 2026-02-19T08:19:46.021870Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285140, Sender [1:7608488121152454952:2307], Recipient [1:7608488112562519755:2198]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:59472" } 2026-02-19T08:19:46.021926Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:964: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2026-02-19T08:19:46.024650Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3326: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2026-02-19T08:19:41.586979Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488101429475355:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:41.587028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:42.043914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:42.102028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:42.102139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:42.126536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:42.216691Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:42.282321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:42.528909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:42.528929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:42.528936Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:42.529014Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:42.595309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:42.913311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:43.128787Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7608488110019410783:2306], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:50526" } 2026-02-19T08:19:43.128844Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-02-19T08:19:43.128883Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:43.128894Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:43.129024Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:50526" 2026-02-19T08:19:43.129236Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1771489183129150) 2026-02-19T08:19:43.190025Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 
planresolution=10 timecastbucketspermediator=2 issue= txid=1771489183129150 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-02-19T08:19:43.190341Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-02-19T08:19:43.195091Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-02-19T08:19:43.196332Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489183129150&action=1" } } } 2026-02-19T08:19:43.196464Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:43.196530Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:43.196644Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:43.197131Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-02-19T08:19:43.197263Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-02-19T08:19:43.203892Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488110019410795:2307], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489183129150&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2026-02-19T08:19:43.203928Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:43.204126Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489183129150&action=1" } } 2026-02-19T08:19:43.209494Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-02-19T08:19:43.209547Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:43.209614Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender 
[1:7608488110019410789:2207], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:43.209633Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:43.209645Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:43.209654Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:43.209693Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-02-19T08:19:43.210326Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-02-19T08:19:43.210398Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-02-19T08:19:43.217701Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:43.217745Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:43.217758Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:43.217765Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:43.217829Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-02-19T08:19:43.217852Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1771489183129150 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:43.232721Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:43.259124Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:43.259559Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-02-19T08:19:43.259576Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-02-19T08:19:43.276308Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "Root" PeerName: "ipv6:[::1]:50526" 2026-02-19T08:19:43.276590Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488110019410831:2310], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489183129150&action=1" } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2026-02-19T08:19:43.276604Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:43.276757Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489183129150&action=1" } } 2026-02-19T08:19:43.278567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schem ... uired_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-02-19T08:19:44.321682Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7608488114314378675:2373], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:50526" } 2026-02-19T08:19:44.321726Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-02-19T08:19:44.321759Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.322052Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488101429475484:2206], Recipient [1:7608488101429475600:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.322068Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:44.322574Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-02-19T08:19:44.405627Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7608488114314378695:2374], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:50526" } 2026-02-19T08:19:44.405655Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-02-19T08:19:44.405689Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.405852Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488101429475484:2206], Recipient 
[1:7608488101429475600:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.405862Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:44.406385Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-02-19T08:19:44.409096Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:44.415150Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2026-02-19T08:19:44.415174Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-02-19T08:19:44.419363Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-02-19T08:19:44.419584Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7608488110019410902:2207], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-02-19T08:19:44.419601Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2026-02-19T08:19:44.419616Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:44.419633Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:44.419673Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:22: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2026-02-19T08:19:44.419704Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1771489183129150 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:44.419787Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2904: Update database for /Root/users/user-1 confirmedsubdomain=2 2026-02-19T08:19:44.422679Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7608488114314378699:2375], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:50526" } 2026-02-19T08:19:44.422706Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-02-19T08:19:44.422759Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.422861Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488101429475484:2206], Recipient [1:7608488101429475600:2207]: 
NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.422874Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:44.423359Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-02-19T08:19:44.450945Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7608488114314378704:2376], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:50526" } 2026-02-19T08:19:44.450974Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-02-19T08:19:44.451026Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.451145Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488101429475484:2206], Recipient [1:7608488101429475600:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.451159Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:44.451695Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-02-19T08:19:44.458965Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:42: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2026-02-19T08:19:44.459002Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:44.466600Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7608488114314378716:2377], Recipient [1:7608488101429475600:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:50526" } 2026-02-19T08:19:44.466629Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-02-19T08:19:44.466672Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.466762Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488101429475484:2206], 
Recipient [1:7608488101429475600:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:44.466773Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:44.472499Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-02-19T08:19:45.042061Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-02-19T08:19:45.042515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:19:48.349849Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608488107472685952:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:48.349930Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:49.134209Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:49.136056Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7608488133242490719:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:49.136149Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNotEnoughRetriesUniq |91.8%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> KqpQuery::PreparedQueryInvalidate >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoDlq [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly >> KqpParams::CheckQueryCacheForPreparedQuery >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifo3Consumers >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true >> KqpAnalyze::AnalyzeTable+ColumnStore >> TTransferTests::Create ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2026-02-19T08:17:59.012408Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1771489079012369 2026-02-19T08:17:59.660346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487662964366811:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:59.660400Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:59.755180Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608487662555694568:2197];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:59.755236Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:59.793028Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:17:59.850228Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:18:00.133878Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:00.149821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:00.195278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:00.195391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:00.203716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:00.203800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:00.225801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:00.297950Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:18:00.303409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:00.366573Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:00.373807Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:00.535130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:00.598604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/00394e/r3tmp/yandexCH8bbD.tmp 2026-02-19T08:18:00.598624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/00394e/r3tmp/yandexCH8bbD.tmp 2026-02-19T08:18:00.598769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/00394e/r3tmp/yandexCH8bbD.tmp 2026-02-19T08:18:00.598837Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:00.703167Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:00.730031Z INFO: TTestServer started on Port 64970 GrpcPort 18029 2026-02-19T08:18:00.831005Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:18029 2026-02-19T08:18:01.036970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:04.125592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487684439204581:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:04.125738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:04.129822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487684439204593:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:04.129887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487684439204594:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:04.130007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:04.143402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:04.197920Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487684030531274:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:04.198004Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487684030531263:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:04.198169Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:04.272175Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487684030531280:2134] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:18:04.292120Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487684439204597:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-02-19T08:18:04.402935Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487684439204682:2985] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:04.633329Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [2:7608487684030531318:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:18:04.633190Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608487684439204692:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:18:04.633192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:04.635552Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NWFmY2UyZmQtMTJkOTk3YjgtYWJkY2RlOGEtYTgyNThlMWE=, ActorId: [1:7608487684439204566:2335], ActorState: ExecuteState, LegacyTraceId: 01khtfmgpv02ebg4bpvthbzbze, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:18:04.636576Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:18:04.633860Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=2&id=YTJkNzE0ZS1lZWRiNWM1MS02MWNhOGY2LWQ1OTY2NTBi, ActorId: [2:76084876840 ... ouched. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 3 parts 0 suffix '63' 2026-02-19T08:19:47.926214Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:903: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 98 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 1 WriteTimestampMS: 1771489182734 CreateTimestampMS: 1771489182714 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 91 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 2 WriteTimestampMS: 1771489182858 CreateTimestampMS: 1771489182714 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 98 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1771489182860 CreateTimestampMS: 1771489182714 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 71 bytes ..." 
SourceId: "\000test-message-group-id" SeqNo: 4 WriteTimestampMS: 1771489182860 CreateTimestampMS: 1771489182715 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551607 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2026-02-19T08:19:47.926511Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:814: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) isMemoryLimitReached 0 EndOffset 4 ReadOffset 4 read result size 4 2026-02-19T08:19:47.926547Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1338: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-02-19T08:19:47.926603Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:828: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 93903777-fb888a6b-b9f9e99a-d64b131a has messages 1 2026-02-19T08:19:47.926767Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1957: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 read done: guid# 93903777-fb888a6b-b9f9e99a-d64b131a, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 549 2026-02-19T08:19:47.926807Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2119: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 response to read: guid# 93903777-fb888a6b-b9f9e99a-d64b131a 2026-02-19T08:19:47.927078Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2162: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 Process answer. Aval parts: 0 2026-02-19T08:19:47.928327Z :DEBUG: [/Root] [/Root] [ae4a1770-ff697902-4dda3877-b604c557] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:19:47.928762Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 grpc read done: success# 1, data# { read { } } 2026-02-19T08:19:47.928882Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1856: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 got read request: guid# 7b470d1e-325049f3-f7c9c195-1634a836 2026-02-19T08:19:47.933929Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2026-02-19T08:19:47.934160Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-02-19T08:19:47.934227Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-02-19T08:19:47.934259Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-02-19T08:19:47.934295Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2026-02-19T08:19:47.934374Z :DEBUG: [/Root] [/Root] [ae4a1770-ff697902-4dda3877-b604c557] [null] The application data is transferred to the client. Number of messages 4, size 14 bytes 2026-02-19T08:19:47.934646Z :INFO: [/Root] [/Root] [ae4a1770-ff697902-4dda3877-b604c557] Closing read session. 
Close timeout: 0.000000s 2026-02-19T08:19:47.934741Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:test-topic:0:1:3:0 2026-02-19T08:19:47.934808Z :INFO: [/Root] [/Root] [ae4a1770-ff697902-4dda3877-b604c557] Counters: { Errors: 0 CurrentSessionLifetimeMs: 60 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:19:47.934966Z :NOTICE: [/Root] [/Root] [ae4a1770-ff697902-4dda3877-b604c557] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:19:47.935038Z :DEBUG: [/Root] [/Root] [ae4a1770-ff697902-4dda3877-b604c557] [null] Abort session to cluster 2026-02-19T08:19:47.935812Z :NOTICE: [/Root] [/Root] [ae4a1770-ff697902-4dda3877-b604c557] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:19:47.936077Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0] Write session: close. Timeout = 0 ms 2026-02-19T08:19:47.936123Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0] Write session will now close 2026-02-19T08:19:47.936191Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0] Write session: aborting 2026-02-19T08:19:47.937540Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0] Write session: gracefully shut down, all writes complete 2026-02-19T08:19:47.937595Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0] Write session: destroy 2026-02-19T08:19:47.938016Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 grpc read done: success# 0, data# { } 2026-02-19T08:19:47.938051Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 grpc read failed 2026-02-19T08:19:47.938086Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 grpc closed 2026-02-19T08:19:47.938138Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_7955109829909721375_v1 is DEAD 2026-02-19T08:19:47.939900Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7608488127669965288:2570] disconnected. 2026-02-19T08:19:47.942080Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_7955109829909721375_v1 2026-02-19T08:19:47.942135Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037892] server disconnected, pipe [11:7608488127669965291:2573] destroyed 2026-02-19T08:19:47.942185Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037892] server disconnected, pipe [11:7608488101900161223:2511] destroyed 2026-02-19T08:19:47.942217Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:19:47.942253Z node 12 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:47.942277Z node 12 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:47.942297Z node 12 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:47.942322Z node 12 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:47.942337Z node 12 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:19:47.942404Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_7955109829909721375_v1 2026-02-19T08:19:47.939943Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7608488127669965288:2570] disconnected; active server actors: 1 2026-02-19T08:19:47.939967Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7608488127669965288:2570] client user disconnected session shared/user_11_1_7955109829909721375_v1 2026-02-19T08:19:47.940123Z node 11 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0 grpc read done: success: 0 data: 2026-02-19T08:19:47.940142Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0 grpc read failed 2026-02-19T08:19:47.940175Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0 grpc closed 2026-02-19T08:19:47.940197Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|c75393e5-53327ecc-a23b5b12-a59781a_0 is DEAD 2026-02-19T08:19:47.941113Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:19:47.973398Z node 12 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:47.973428Z node 12 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:47.973441Z node 12 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:47.973466Z node 12 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:47.973484Z node 12 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:19:48.077613Z node 12 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:19:48.077663Z node 12 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:48.077682Z node 12 :PERSQUEUE DEBUG: partition.cpp:2366: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:19:48.077728Z node 12 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:19:48.077748Z node 12 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2026-02-19T08:19:23.678910Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488023038707529:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:23.678976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:24.273925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:24.307566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:24.307664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:24.412086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:24.550025Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:24.577826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:24.770406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:24.770429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:24.770436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:24.787184Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:24.817406Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:25.164175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:28.020468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545346:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.020575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.021059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545356:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.021127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.299684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:28.621852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545514:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.621966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.625832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545517:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.625919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.663220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:19:28.677873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488023038707529:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:28.677969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Table has 1 shards 2026-02-19T08:19:28.913748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545640:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.913894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.914358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545656:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.914434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545658:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.914471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545659:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.914526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545660:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.914570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545661:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.914601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545662:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.914950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.921002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545676:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.924497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.924596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545684:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.924642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545687:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.924699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545678:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.929126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545699:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.929215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.935702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545728:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:28.935797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488044513545739:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resourc ... eshard_impl.cpp:2707: Change state for txid 281474976710693:0 TransferData -> NotifyPartitioningChanged 2026-02-19T08:19:48.768396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:19:48.768717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:19:48.768798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:19:48.770380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710693 2026-02-19T08:19:48.770432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710693 2026-02-19T08:19:48.770451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-02-19T08:19:48.770906Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state);tx_id=0; 2026-02-19T08:19:48.770929Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:3155: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state) 2026-02-19T08:19:48.771276Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:909: SelfId: [1:7608488130412949336:2383], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7608488044513545632:2383]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7608488130412949336:2383]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-02-19T08:19:48.771347Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:7608488130412949308:2383], SessionActorId: [1:7608488044513545632:2383], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7608488044513545632:2383]. 2026-02-19T08:19:48.771512Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=1&id=YzFkMDU4M2QtYzgwNGYyYjMtNDcyYTc0YmMtMWU0MDlmZjI=, ActorId: [1:7608488044513545632:2383], ActorState: ExecuteState, LegacyTraceId: 01khtfqptn24pjjffyfcftx3qq, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7608488130412949309:2383] from: [1:7608488130412949308:2383] trace_id# 2026-02-19T08:19:48.771587Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:7608488130412949309:2383] TxId: 281474976720814. Ctx: { TraceId: 01khtfqptn24pjjffyfcftx3qq, Database: , SessionId: ydb://session/3?node_id=1&id=YzFkMDU4M2QtYzgwNGYyYjMtNDcyYTc0YmMtMWU0MDlmZjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-02-19T08:19:48.771841Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YzFkMDU4M2QtYzgwNGYyYjMtNDcyYTc0YmMtMWU0MDlmZjI=, ActorId: [1:7608488044513545632:2383], ActorState: ExecuteState, LegacyTraceId: 01khtfqptn24pjjffyfcftx3qq, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-02-19T08:19:48.784638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-02-19T08:19:48.784721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-02-19T08:19:48.784750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-02-19T08:19:48.784802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-02-19T08:19:48.786750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 2026-02-19T08:19:48.855320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:19:48.855504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 14.999778s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-02-19T08:19:48.857499Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7608488023038707840:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-02-19T08:19:48.862953Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7608488023038707840:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-02-19T08:19:48.961904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 106, next wakeup# 14.893391s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-02-19T08:19:48.965113Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2026-02-19T08:19:48.966417Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 110, next wakeup# 14.888874s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:19:48.968852Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2026-02-19T08:19:48.973166Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-02-19T08:19:48.992162Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:19:48.992227Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:19:48.992846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:19:48.993125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:19:49.008560Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:19:49.012227Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:19:49.013219Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:19:49.013305Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:19:49.049800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, compactionInfo# {72057594046644480:3, SH# 3, Rows# 2643, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 585.036613s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:19:49.050465Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:148: Started compaction# 2 with cookie 1 of 72075186224037890 tableId# 38 localTid# 1001, requested from [1:7608488023038707840:2202], partsCount# 2, memtableSize# 624, memtableWaste# 3984, memtableRows# 1 2026-02-19T08:19:49.096098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, compactionInfo# {72057594046644480:2, SH# 3, Rows# 2489, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 584.990312s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:19:49.096168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:113: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 46 ms, with status# 0, next wakeup in# 
584.990231s, rate# 1.157407407e-05, in queue# 1 shards, waiting after compaction# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-02-19T08:19:49.097414Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:148: Started compaction# 2 with cookie 1 of 72075186224037889 tableId# 38 localTid# 1001, requested from [1:7608488023038707840:2202], partsCount# 2, memtableSize# 624, memtableWaste# 3984, memtableRows# 1 2026-02-19T08:19:49.141479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:113: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 45 ms, with status# 0, next wakeup in# 584.944936s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 2 shards, running# 0 shards at schemeshard 72057594046644480 Table has 2 shards >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn >> TTransferTests::Create_Disabled >> KqpExplain::UpdateOnSecondary-UseSink-UseStreamIndex [GOOD] >> KqpExplain::UpdateOnSecondary+UseSink-UseStreamIndex >> KqpStats::MultiTxStatsFullYql >> TGRpcCmsTest::SimpleTenantsTest [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveOperation [GOOD] >> TBackupCollectionTests::ConcurrentDropProtectionTest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |91.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |91.8%| [LD] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2026-02-19T08:19:47.267222Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488124833928748:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:47.267312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:47.680045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:47.714950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:47.715120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:47.778965Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:47.824555Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:48.033703Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:48.048567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:48.048586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:48.048604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:48.048703Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:48.292916Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:48.379677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:48.469034Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7608488129128897075:2306], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:47192" } 2026-02-19T08:19:48.469078Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-02-19T08:19:48.469100Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:48.469113Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:48.469227Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:47192" 2026-02-19T08:19:48.469388Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1771489188468414) 2026-02-19T08:19:48.532041Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1771489188468414 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-02-19T08:19:48.532303Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 
state=NOT_ALLOCATED 2026-02-19T08:19:48.547661Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-02-19T08:19:48.548900Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489188468414&action=1" } } } 2026-02-19T08:19:48.549040Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:48.549145Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:48.549298Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:48.549796Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-02-19T08:19:48.549940Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-02-19T08:19:48.559810Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488129128897083:2307], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489188468414&action=1" } UserToken: "" } 2026-02-19T08:19:48.559845Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:48.560490Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489188468414&action=1" } } 2026-02-19T08:19:48.563714Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-02-19T08:19:48.563768Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:48.563829Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7608488129128897080:2206], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:48.563874Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-02-19T08:19:48.563889Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:48.563900Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:48.563941Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-02-19T08:19:48.563959Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update 
pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-02-19T08:19:48.564036Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-02-19T08:19:48.578826Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:48.578859Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:48.578874Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:48.578880Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:48.578929Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-02-19T08:19:48.578953Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1771489188468414 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:48.587966Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:48.597466Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:48.597537Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-02-19T08:19:48.597547Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-02-19T08:19:48.609558Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:47192" 2026-02-19T08:19:48.611428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:19:48.619034Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-02-19T08:19:48.619108Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2026-02-19T08:19:48.625981Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488129128897145:2310], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489188468414&action=1" } UserToken: "" } 2026-02-19T08:19:48.626007Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event 
TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:48.626160Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489188468414&action=1" ... yTxCompletionResult: TxId: 281474976710660 2026-02-19T08:19:49.640471Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-02-19T08:19:49.640567Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:49.640695Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7608488133423865018:2206], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:49.640724Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-02-19T08:19:49.640739Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:49.640748Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:49.640803Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-02-19T08:19:49.640848Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1771489189554221 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:49.640910Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1771489189554221 issue= 2026-02-19T08:19:49.640991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-02-19T08:19:49.641053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2026-02-19T08:19:49.641081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-02-19T08:19:49.641116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-02-19T08:19:49.641162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-02-19T08:19:49.644946Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-02-19T08:19:49.648270Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-02-19T08:19:49.648362Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-02-19T08:19:49.648382Z node 1 
:CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:49.648663Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7608488124833929068:2205], Recipient [1:7608488124833929184:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-02-19T08:19:49.648704Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-02-19T08:19:49.648725Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:49.648733Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:49.648756Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-02-19T08:19:49.648779Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1771489189554221 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:49.650486Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-02-19T08:19:49.650534Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:49.650572Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-02-19T08:19:49.650672Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-02-19T08:19:49.651537Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-02-19T08:19:49.651633Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-02-19T08:19:49.657699Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-02-19T08:19:49.657821Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7608488133423865130:2206], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-02-19T08:19:49.657852Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-02-19T08:19:49.657888Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:49.657896Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:49.657934Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-02-19T08:19:49.657950Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for 
/Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-02-19T08:19:49.658900Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488133423865135:2385], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489189554221&action=2" } UserToken: "" } 2026-02-19T08:19:49.658923Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:49.659109Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489189554221&action=2" } } 2026-02-19T08:19:49.669101Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-02-19T08:19:49.669133Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-02-19T08:19:49.669141Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:49.669147Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-02-19T08:19:49.669209Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1771489189554221 2026-02-19T08:19:49.669220Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1771489189554221 issue= 2026-02-19T08:19:49.669230Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1771489189554221 issue= 2026-02-19T08:19:49.669242Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-02-19T08:19:49.669345Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1771489189554221 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-02-19T08:19:49.673439Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-02-19T08:19:49.673524Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-02-19T08:19:49.713886Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:49.716592Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7608488133423865154:2387], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489189554221&action=2" } UserToken: "" } 2026-02-19T08:19:49.716620Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-02-19T08:19:49.716785Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1771489189554221&action=2" ready: true status: SUCCESS } } 2026-02-19T08:19:49.722812Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 
273285122, Sender [1:7608488133423865157:2389], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:47192" } 2026-02-19T08:19:49.722848Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-02-19T08:19:49.723001Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2026-02-19T08:19:49.738386Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7608488133423865160:2390], Recipient [1:7608488124833929184:2206]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:47192" } 2026-02-19T08:19:49.738414Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2026-02-19T08:19:49.738588Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2026-02-19T08:19:49.751401Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-02-19T08:19:49.752438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> TTransferTests::Create [GOOD] >> TTransferTests::CreateSequential >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert [GOOD] >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert |91.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |91.8%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] >> TResourcePoolTest::ParallelAlterResourcePool >> TResourcePoolTest::ParallelCreateSameResourcePool |91.8%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBackupCollectionTests::ConcurrentDropProtectionTest [GOOD] >> TBackupCollectionTests::BackupServiceDirectoryValidation >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel >> KqpPrefixedVectorIndexes::EmptyPrefixedVectorIndexInsertWithOverlap-Covered [GOOD] >> TResourcePoolTest::CreateResourcePool >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> SlowTopicAutopartitioning::CDC_Write [GOOD] >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 65378, msgbus: 21653 2026-02-19T08:14:14.533012Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486694288174739:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:14.537297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:14.791629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:14.819146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:14.819417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:14.854301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:14.901184Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:15.065298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:15.065324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:15.065330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:15.066219Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:15.099784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:15.328784Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486694288174947:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:15.328865Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486698583143028:2741] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:15.329849Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486698583143028:2741] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:15.361053Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486698583143028:2741] SEND to# 
72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:14:15.374562Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486698583143028:2741] Handle TEvDescribeSchemeResult Forward to# [1:7608486698583143027:2740] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:15.377952Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486694288174947:2143] Handle TEvProposeTransaction 2026-02-19T08:14:15.377988Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486694288174947:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:14:15.378056Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486694288174947:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608486698583143033:2746] 2026-02-19T08:14:15.479149Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486698583143033:2746] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:15.479239Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486698583143033:2746] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:15.479256Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486698583143033:2746] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:15.479336Z node 1 :TX_PROXY DEBUG: 
schemereq.cpp:1889: Actor# [1:7608486698583143033:2746] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:15.479693Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486698583143033:2746] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:15.479888Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486698583143033:2746] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:14:15.480014Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486698583143033:2746] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:14:15.480177Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486698583143033:2746] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:14:15.480833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:15.483627Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486698583143033:2746] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:14:15.483692Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486698583143033:2746] txid# 281474976710657 SEND to# [1:7608486698583143032:2745] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:14:15.517762Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486694288174947:2143] Handle TEvProposeTransaction 2026-02-19T08:14:15.517800Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486694288174947:2143] TxId# 281474976710658 ProcessProposeTransaction 2026-02-19T08:14:15.517884Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486694288174947:2143] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7608486698583143072:2779] 2026-02-19T08:14:15.519982Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486698583143072:2779] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:15.520056Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486698583143072:2779] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:15.520072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486698583143072:2779] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:15.520140Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486698583143072:2779] txid# 281474976710658 TEvNavigateKeySet requested from 
SchemeCache 2026-02-19T08:14:15.520502Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486698583143072:2779] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:15.520653Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486698583143072:2779] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:14:15.520753Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486698583143072:2779] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-02-19T08:14:15.520974Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486698583143072:2779] txid# 281474976710658 HANDLE EvClientConnected 2026-02-19T08:14:15.521645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:14:15.524725Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486698583143072:2779] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-02-19T08:14:15.524769Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486698583143072:2779] txid# 281474976710658 SEND to# [1:7608486698583143071:2778] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-02-19T08:14:15.554019Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:15.560170Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486694288174947:2143] Handle TEvProposeTransaction 2026-02-19T08:14:15.560229Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:23 ... 
est# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:19:55.359199Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7608488159180417472:2995] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-02-19T08:19:55.359369Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [59:7608488159180417472:2995] txid# 281474976710661 HANDLE EvClientConnected 2026-02-19T08:19:55.363323Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [59:7608488159180417472:2995] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2026-02-19T08:19:55.363481Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488159180417472:2995] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:55.363520Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488159180417472:2995] txid# 281474976710661 SEND to# [59:7608488159180417402:2342] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-02-19T08:19:55.401461Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488133410612370:2143] Handle TEvProposeTransaction 2026-02-19T08:19:55.401494Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488133410612370:2143] TxId# 281474976710662 ProcessProposeTransaction 2026-02-19T08:19:55.401536Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488133410612370:2143] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7608488159180417496:3007] 2026-02-19T08:19:55.403793Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488159180417496:3007] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:59532" 2026-02-19T08:19:55.403863Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488159180417496:3007] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:55.403883Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488159180417496:3007] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:19:55.403941Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488159180417496:3007] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:55.404276Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488159180417496:3007] txid# 281474976710662 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:55.404360Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [59:7608488159180417496:3007] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:19:55.496328Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7608488159180417496:3007] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-02-19T08:19:55.496499Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [59:7608488159180417496:3007] txid# 281474976710662 HANDLE EvClientConnected 2026-02-19T08:19:55.501403Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [59:7608488159180417496:3007] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-02-19T08:19:55.501473Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488159180417496:3007] txid# 281474976710662 SEND to# [59:7608488159180417495:2334] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-02-19T08:19:55.520398Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488133410612370:2143] Handle TEvProposeTransaction 2026-02-19T08:19:55.520430Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488133410612370:2143] TxId# 281474976710663 ProcessProposeTransaction 2026-02-19T08:19:55.520471Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488133410612370:2143] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7608488159180417510:3017] 2026-02-19T08:19:55.522704Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488159180417510:3017] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35318" 2026-02-19T08:19:55.522775Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488159180417510:3017] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:55.522795Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488159180417510:3017] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:19:55.522853Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488159180417510:3017] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:55.523201Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488159180417510:3017] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:55.523305Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [59:7608488159180417510:3017] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2026-02-19T08:19:55.523385Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7608488159180417510:3017] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2026-02-19T08:19:55.523529Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [59:7608488159180417510:3017] txid# 281474976710663 HANDLE EvClientConnected 2026-02-19T08:19:55.523955Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:19:55.527328Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [59:7608488159180417510:3017] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2026-02-19T08:19:55.527389Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488159180417510:3017] txid# 281474976710663 SEND to# [59:7608488159180417509:2347] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2026-02-19T08:19:55.605192Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488133410612370:2143] Handle TEvProposeTransaction 2026-02-19T08:19:55.605230Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488133410612370:2143] TxId# 281474976710664 ProcessProposeTransaction 2026-02-19T08:19:55.605270Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488133410612370:2143] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7608488159180417541:3031] 2026-02-19T08:19:55.607366Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488159180417541:3031] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35354" 2026-02-19T08:19:55.607423Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488159180417541:3031] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:55.607438Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488159180417541:3031] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2026-02-19T08:19:55.607592Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488159180417541:3031] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:55.607646Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488159180417541:3031] txid# 281474976710664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2026-02-19T08:19:55.607704Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488159180417541:3031] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:55.608031Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488159180417541:3031] txid# 281474976710664 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:55.608095Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [59:7608488159180417541:3031] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:19:55.608146Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7608488159180417541:3031] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2026-02-19T08:19:55.608275Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [59:7608488159180417541:3031] txid# 281474976710664 HANDLE EvClientConnected 2026-02-19T08:19:55.610592Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [59:7608488159180417541:3031] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2026-02-19T08:19:55.610671Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488159180417541:3031] txid# 281474976710664 SEND to# [59:7608488159180417540:2352] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} >> IndexBuildTest::RejectsCancel [GOOD] >> IndexBuildTest::NullsAreUniq >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2026-02-19T08:16:34.301911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:16:34.301982Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:34.303354Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:16:34.318008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:16:34.318401Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:16:34.318711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:16:34.368778Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:16:34.375959Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:16:34.376161Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:16:34.378069Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:16:34.378153Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:16:34.378202Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:16:34.378600Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: 
TDataShard::TTxInit::Complete 2026-02-19T08:16:34.378682Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:16:34.378770Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:16:34.459552Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:16:34.491191Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:16:34.491400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:16:34.491515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:16:34.491566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:16:34.491601Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:16:34.491634Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:16:34.491820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:16:34.491865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:16:34.492165Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:16:34.492261Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:16:34.492382Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:16:34.492426Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:16:34.492460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:16:34.492491Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:16:34.492538Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:16:34.492578Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:16:34.492626Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:16:34.492726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:34.492848Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:34.492901Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:16:34.500306Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient 
[1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:16:34.500392Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:16:34.500489Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:16:34.500674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:16:34.500748Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:16:34.500823Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:16:34.500870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:16:34.500908Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:16:34.500951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:16:34.500981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:16:34.501274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:16:34.501311Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:16:34.501344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:16:34.501375Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:16:34.501434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:16:34.501482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:16:34.501516Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:16:34.501548Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:16:34.501572Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:16:34.515933Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:16:34.516033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:16:34.516087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:16:34.516130Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 
at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:16:34.516206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:16:34.516728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:34.516782Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:16:34.516827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:16:34.516939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:16:34.516979Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:16:34.517134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:16:34.517198Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:16:34.517237Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:16:34.517269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:16:34.521025Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:16:34.521099Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:16:34.521325Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:16:34.521397Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:16:34.521458Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:16:34.521495Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:16:34.521530Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:16:34.521570Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:16:34.521603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ecution plan for [0:10] at 9437184 executing on unit ExecuteDataTx 2026-02-19T08:19:45.673383Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit FinishPropose 2026-02-19T08:19:45.673425Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit FinishPropose 2026-02-19T08:19:45.673485Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 10 at tablet 9437184 send to client, exec latency: 8 ms, propose latency: 8 ms, status: COMPLETE 2026-02-19T08:19:45.673574Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is DelayComplete 2026-02-19T08:19:45.673609Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2026-02-19T08:19:45.673642Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit CompletedOperations 2026-02-19T08:19:45.673675Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2026-02-19T08:19:45.673747Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is Executed 2026-02-19T08:19:45.673777Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2026-02-19T08:19:45.673808Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 9437184 has finished 2026-02-19T08:19:45.678438Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:19:45.678535Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 9437184 on unit FinishPropose 2026-02-19T08:19:45.678611Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:19:49.181206Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269551617, Sender [3:104:2137], Recipient [3:241:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 104 RawX2: 12884904025 } 2026-02-19T08:19:49.181283Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3219: StateWork, processing event TEvDataShard::TEvGetShardState 2026-02-19T08:19:49.181770Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [3:505:2478], Recipient [3:241:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:19:49.181820Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:19:49.181865Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [3:504:2477], serverId# [3:505:2478], sessionId# [0:0:0] 2026-02-19T08:19:49.182165Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [3:104:2137], Recipient [3:241:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 104 RawX2: 12884904025 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 
AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? \005?\034?x\000\006 2026-02-19T08:19:49.182214Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:19:49.182331Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:19:49.184515Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2026-02-19T08:19:49.220154Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-02-19T08:19:49.220267Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2026-02-19T08:19:49.220311Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2026-02-19T08:19:49.220355Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2026-02-19T08:19:49.220420Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-02-19T08:19:49.220491Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 9437184 2026-02-19T08:19:49.220535Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-02-19T08:19:49.220562Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2026-02-19T08:19:49.220589Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BlockFailPoint 2026-02-19T08:19:49.220621Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BlockFailPoint 2026-02-19T08:19:49.220650Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-02-19T08:19:49.220681Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BlockFailPoint 2026-02-19T08:19:49.220712Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2026-02-19T08:19:49.220741Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-02-19T08:19:49.272138Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2026-02-19T08:19:49.272589Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 11 released its data 2026-02-19T08:19:49.272658Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-02-19T08:19:49.326624Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:19:49.326713Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-02-19T08:19:49.327624Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 11 at 9437184 restored its data 2026-02-19T08:19:49.377397Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:195: Tablet 9437184 is not ready for [0:11] execution 2026-02-19T08:19:49.377953Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 11 released its data 2026-02-19T08:19:49.378036Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-02-19T08:19:49.905060Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:19:49.905149Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-02-19T08:19:49.907896Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 11 at 9437184 restored its data 2026-02-19T08:19:50.249912Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2026-02-19T08:19:50.252252Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 11 released its data 2026-02-19T08:19:50.252372Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-02-19T08:19:50.260719Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:19:50.260802Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-02-19T08:19:50.268938Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 11 at 9437184 restored its data 2026-02-19T08:19:52.012856Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2026-02-19T08:19:52.012965Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:19:52.013037Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-02-19T08:19:52.013076Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2026-02-19T08:19:52.013110Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit FinishPropose 2026-02-19T08:19:52.013143Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit FinishPropose 2026-02-19T08:19:52.013192Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 10 ms, propose latency: 10 ms, status: COMPLETE 2026-02-19T08:19:52.013260Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: 
Execution status for [0:11] at 9437184 is DelayComplete 2026-02-19T08:19:52.013291Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2026-02-19T08:19:52.013325Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit CompletedOperations 2026-02-19T08:19:52.013357Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2026-02-19T08:19:52.013407Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-02-19T08:19:52.013432Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2026-02-19T08:19:52.013459Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 9437184 has finished 2026-02-19T08:19:52.018468Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:19:52.018541Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:11] at 9437184 on unit FinishPropose 2026-02-19T08:19:52.018602Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpExplain::Explain [GOOD] >> KqpExplain::CompoundKeyRange >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TBackupCollectionTests::BackupServiceDirectoryValidation [GOOD] >> TBackupCollectionTests::BackupWithIndexes |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |91.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:19:57.839084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:19:57.839202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:57.839267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:19:57.839320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:19:57.839354Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:19:57.839392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:19:57.839419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:19:57.839443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:19:57.839522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:57.839592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:19:57.840466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:19:57.840768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:19:57.932403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:19:57.932469Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:57.967807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:19:57.974319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:19:57.974586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:19:58.008022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:19:58.009371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:19:58.010165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:58.010550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:19:58.020478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:58.020705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:19:58.022170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:19:58.022243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:58.022351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:19:58.022396Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:19:58.022436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:19:58.022620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:19:58.484405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.485486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.485619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.485771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.485887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.485967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.486045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.486120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.486214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.486299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.486390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.486451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.486539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.486613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 28147 ... pp:265: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.577519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.577561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-02-19T08:19:59.577666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.577702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.578031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-02-19T08:19:59.578114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.578138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.578186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-02-19T08:19:59.578307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.578327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.578450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2026-02-19T08:19:59.578530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:19:59.578579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.578601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.578814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:19:59.578923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.578949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.578994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-02-19T08:19:59.579102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.579152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.579212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- 
TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.579253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.579345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:19:59.579535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-02-19T08:19:59.579626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.579655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.579711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.579740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.579785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-02-19T08:19:59.579978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.580009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.580098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.580124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.580189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-02-19T08:19:59.580356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.580383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.580441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2026-02-19T08:19:59.580542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.580567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.580675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.580707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.580831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.580851Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.580880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.580904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.581082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.581106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:846:2834] 2026-02-19T08:19:59.581214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.581236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:846:2834] TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 2026-02-19T08:19:59.585048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:19:59.585300Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 245us result status StatusSuccess 2026-02-19T08:19:59.585793Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 2 } ChildrenExist: 
false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 Properties { Properties { key: "concurrent_query_limit" value: "20" } Properties { key: "query_cancel_after_seconds" value: "60" } Properties { key: "query_count_limit" value: "50" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:19:58.048327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:19:58.048436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:58.048513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:19:58.048569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:19:58.048605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:19:58.048642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:19:58.048676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:19:58.048713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: 
ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:19:58.048810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:58.048887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:19:58.050115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:19:58.050444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:19:58.224241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:19:58.224307Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:58.248299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:19:58.249030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:19:58.249220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:19:58.271151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:19:58.271897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:19:58.272607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:58.272965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:19:58.286553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:58.286817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:19:58.288224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:19:58.288295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:58.288419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:19:58.288463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:19:58.288506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:19:58.288709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-02-19T08:19:58.470863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.471801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.471927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:58.472728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
02-19T08:19:59.660975Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 261us result status StatusSuccess 2026-02-19T08:19:59.661411Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:19:59.661955Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:19:59.662087Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 135us result status StatusSuccess 2026-02-19T08:19:59.662335Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2026-02-19T08:19:59.662569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2026-02-19T08:19:59.662605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2026-02-19T08:19:59.662679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2026-02-19T08:19:59.662704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2026-02-19T08:19:59.662756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2026-02-19T08:19:59.662782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2026-02-19T08:19:59.663258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-02-19T08:19:59.663389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.663430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:752:2740] 2026-02-19T08:19:59.663648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-02-19T08:19:59.663711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.663732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:752:2740] 2026-02-19T08:19:59.663814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-02-19T08:19:59.663893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-02-19T08:19:59.663912Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:752:2740] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-02-19T08:19:59.664323Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:19:59.664473Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 169us result status StatusSuccess 2026-02-19T08:19:59.664753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2026-02-19T08:19:59.671630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "NilNoviSubLuna" } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:19:59.671920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 128:0, path# /MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna 2026-02-19T08:19:59.672081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 
128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:19:59.674545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 125, at schemeshard: 72057594046678944 2026-02-19T08:19:59.674811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn >> TResourcePoolTest::CreateResourcePool [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifo3Consumers [GOOD] |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |91.8%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::OlapTemporary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:19:59.668560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:19:59.668658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:59.668723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, 
ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:19:59.668760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:19:59.668821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:19:59.668871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:19:59.668910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:19:59.668939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:19:59.669032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:59.669112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:19:59.670018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:19:59.670330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:19:59.756591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:19:59.756657Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:59.772568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:19:59.774761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:19:59.774992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:19:59.790463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:19:59.791651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:19:59.792589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:59.792951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:19:59.797478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:59.797752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:19:59.799541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:19:59.799608Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:59.799734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:19:59.800104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:19:59.800174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:19:59.800399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:19:59.959478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.960609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.960749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.960848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.960962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:59.961746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 5 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:00.942404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-02-19T08:20:00.942592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> Done 2026-02-19T08:20:00.942797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:20:00.942888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:20:00.945079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:20:00.946012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:20:00.946574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:20:00.948405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:00.948466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:20:00.948666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:20:00.948747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:20:00.948846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:00.948899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 40 2026-02-19T08:20:00.948947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-02-19T08:20:00.948983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 
72057594046678944, txId: 102, path id: 41 2026-02-19T08:20:00.949085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:20:00.949139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:20:00.949252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:20:00.949290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:20:00.949357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:20:00.949395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:20:00.949455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:20:00.949501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:20:00.949540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:20:00.949575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:20:00.949669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-02-19T08:20:00.949758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:20:00.949804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2026-02-19T08:20:00.949839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2026-02-19T08:20:00.951701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:20:00.951816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:20:00.951871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:20:00.951921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2026-02-19T08:20:00.951973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:20:00.953434Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:20:00.953607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:20:00.953647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:20:00.953690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-02-19T08:20:00.953762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:20:00.953870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:20:00.959397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:20:00.960868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:20:00.961174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:20:00.961232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:20:00.961739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:20:00.961880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:20:00.961924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:756:2744] TestWaitNotification: OK eventTxId 102 2026-02-19T08:20:00.962498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:00.962787Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 323us result status StatusSuccess 2026-02-19T08:20:00.963264Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" 
PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifo0Consumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2026-02-19T08:17:45.270564Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487600113915551:2168];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:45.270799Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:45.376107Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:17:46.017814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:46.094379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:46.094480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:46.223519Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:46.258905Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487600113915411:2081] 1771489065214884 != 
1771489065214887 2026-02-19T08:17:46.297101Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:46.310039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:17:46.313979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:46.317633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:46.614417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0033b1/r3tmp/yandexOdAtkM.tmp 2026-02-19T08:17:46.614441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0033b1/r3tmp/yandexOdAtkM.tmp 2026-02-19T08:17:46.614627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0033b1/r3tmp/yandexOdAtkM.tmp 2026-02-19T08:17:46.614714Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:46.745678Z INFO: TTestServer started on Port 23445 GrpcPort 6918 PQClient connected to localhost:6918 2026-02-19T08:17:47.437456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:47.459233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:17:47.624150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:47.870360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-02-19T08:17:47.901891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-02-19T08:17:50.270971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487600113915551:2168];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:50.271024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:50.978894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621588752948:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool 
info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.979057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.982112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621588752961:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.982214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621588752962:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.982369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.991137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:50.992211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621588752994:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.992318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.997909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621588752998:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.998568Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.022743Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487621588752965:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:17:51.276557Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487625883720319:2650] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:51.320367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.407794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.524610Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608487625883720330:2352], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:17:51.525769Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NGIwNjJjNjctOGFlNzFkZC1lNmZiYTUzMy04MTBhNzQyOA==, ActorId: [1:7608487621588752945:2337], ActorState: ExecuteState, LegacyTraceId: 01khtfm3v5bh7jr8c817ge69z8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:17:51.534737Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:17:51.557145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.lo ... 
5389 Values: 635389 Values: 635389 Values: 10099 Values: 26118 Values: 2 Values: 0 Values: 0 Values: 0 Values: 20000000000 Values: 0 Values: 0 Values: 0 ConsumerAggregatedCounters { Consumer: "consumer-1" Values: 1771489137425 Values: 1771489135367 Values: 3052 Values: 737127 Values: 1771489196967 Values: 9936 Values: 0 Values: 5822 Values: 1 Values: 0 Values: 737127 Values: 0 Values: 0 Values: 0 Values: 838079 Values: 838079 Values: 838079 Values: 838079 Values: 838079 Values: 838079 Values: 838079 Values: 838079 Values: 1000000000 Values: 0 Values: 9936 Values: 1771489196967 Values: 13 Values: 3052 Values: 0 } ExtendedCounters { Values: 89 Values: 737127 Values: 0 } } Generation: 1 Cookie: 58 ScaleStatus: NORMAL } PartResult { Partition: 52 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 1771489136 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 652255 AvgWriteSpeedPerHour: 652255 AvgWriteSpeedPerDay: 652255 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 860370 AvgReadSpeedPerHour: 860370 AvgReadSpeedPerDay: 860370 ReadBytesQuota: 1000000000 WriteBytesQuota: 1048576 PartitionSize: 0 StartOffset: 0 EndOffset: 3133 LastWriteTimestampMs: 1771489173529 WriteLagMs: 9933 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 652255 AvgQuotaSpeedPerHour: 652255 AvgQuotaSpeedPerDay: 652255 SourceIdCount: 8 SourceIdRetentionPeriodSec: 26 ConsumerResult { Consumer: "consumer-1" AvgReadSpeedPerMin: 860370 AvgReadSpeedPerHour: 860370 AvgReadSpeedPerDay: 860370 WriteLagMs: 9933 ReadLagMs: 0 LastReadTimestampMs: 1771489197187 ReadingFinished: false CommitedLagMs: 59583 } UsedReserveSize: 0 AggregatedCounters { Values: 1771489136594 Values: 1 Values: 1 Values: 8 Values: 8 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 652255 Values: 652255 Values: 652255 Values: 652255 Values: 652255 Values: 652255 Values: 0 Values: 0 Values: 1048576 Values: 9933 Values: 1771489173529 Values: 0 Values: 0 Values: 652255 Values: 652255 Values: 652255 Values: 652255 Values: 652255 Values: 652255 Values: 10367 Values: 26112 Values: 2 Values: 0 Values: 0 Values: 0 Values: 20000000000 Values: 0 Values: 0 Values: 0 ConsumerAggregatedCounters { Consumer: "consumer-1" Values: 1771489137604 Values: 1771489135367 Values: 3133 Values: 756464 Values: 1771489196967 Values: 9933 Values: 0 Values: 10583 Values: 1 Values: 0 Values: 756464 Values: 0 Values: 0 Values: 0 Values: 860370 Values: 860370 Values: 860370 Values: 860370 Values: 860370 Values: 860370 Values: 860370 Values: 860370 Values: 1000000000 Values: 0 Values: 9933 Values: 1771489196967 Values: 14 Values: 3133 Values: 0 } ExtendedCounters { Values: 90 Values: 756464 Values: 0 } } Generation: 1 Cookie: 58 ScaleStatus: NORMAL } 2026-02-19T08:19:57.264470Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-02-19T08:19:57.264486Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NORMAL 2026-02-19T08:19:57.264969Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__mlp_balancing.cpp:121: Handle TEvPersQueue::TEvStatusResponse TabletId: 72075186224037922 PartResult { Partition: 53 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 1771489136 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 572919 AvgWriteSpeedPerHour: 572919 AvgWriteSpeedPerDay: 572919 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 755677 AvgReadSpeedPerHour: 755677 AvgReadSpeedPerDay: 755677 ReadBytesQuota: 1000000000 WriteBytesQuota: 1048576 PartitionSize: 0 StartOffset: 0 EndOffset: 2752 LastWriteTimestampMs: 1771489173737 WriteLagMs: 9998 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 572919 AvgQuotaSpeedPerHour: 572919 AvgQuotaSpeedPerDay: 572919 SourceIdCount: 8 SourceIdRetentionPeriodSec: 26 ConsumerResult { Consumer: "consumer-1" AvgReadSpeedPerMin: 755677 AvgReadSpeedPerHour: 755677 AvgReadSpeedPerDay: 755677 WriteLagMs: 9998 ReadLagMs: 0 LastReadTimestampMs: 1771489197187 ReadingFinished: false CommitedLagMs: 59581 } UsedReserveSize: 0 AggregatedCounters { Values: 1771489136602 Values: 2 Values: 1 Values: 8 Values: 8 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 572919 Values: 572919 Values: 572919 Values: 572919 Values: 572919 Values: 572919 Values: 0 Values: 0 Values: 1048576 Values: 9998 Values: 1771489173737 Values: 0 Values: 0 Values: 572919 Values: 572919 Values: 572919 Values: 572919 Values: 572919 Values: 572919 Values: 9106 Values: 26114 Values: 2 Values: 0 Values: 0 Values: 0 Values: 20000000000 Values: 0 Values: 0 Values: 0 ConsumerAggregatedCounters { Consumer: "consumer-1" Values: 1771489137606 Values: 1771489135367 Values: 2752 Values: 665368 Values: 1771489196959 Values: 9998 Values: 0 Values: 7726 Values: 1 Values: 0 Values: 665368 Values: 0 Values: 0 Values: 0 Values: 755677 Values: 755677 Values: 755677 Values: 755677 Values: 755677 Values: 755677 Values: 755677 Values: 755677 Values: 1000000000 Values: 0 Values: 9998 Values: 1771489196959 Values: 12 Values: 2752 Values: 0 } ExtendedCounters { Values: 86 Values: 665368 Values: 0 } } Generation: 1 Cookie: 58 ScaleStatus: NORMAL } PartResult { Partition: 54 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 1771489136 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 658056 AvgWriteSpeedPerHour: 658056 AvgWriteSpeedPerDay: 658056 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 867993 AvgReadSpeedPerHour: 867993 AvgReadSpeedPerDay: 867993 ReadBytesQuota: 1000000000 WriteBytesQuota: 1048576 PartitionSize: 0 StartOffset: 0 EndOffset: 3161 LastWriteTimestampMs: 1771489174180 WriteLagMs: 10000 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 658056 AvgQuotaSpeedPerHour: 658056 AvgQuotaSpeedPerDay: 658056 SourceIdCount: 8 SourceIdRetentionPeriodSec: 26 ConsumerResult { Consumer: "consumer-1" AvgReadSpeedPerMin: 867993 AvgReadSpeedPerHour: 867993 AvgReadSpeedPerDay: 867993 WriteLagMs: 10000 ReadLagMs: 0 LastReadTimestampMs: 1771489197187 ReadingFinished: false CommitedLagMs: 59575 } UsedReserveSize: 0 AggregatedCounters { Values: 1771489136602 Values: 0 Values: 1 Values: 8 Values: 8 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 658056 Values: 658056 Values: 658056 Values: 658056 Values: 658056 Values: 658056 Values: 0 Values: 0 Values: 1048576 Values: 10000 Values: 1771489174180 Values: 0 Values: 0 Values: 658056 Values: 658056 Values: 658056 Values: 658056 Values: 658056 Values: 658056 
Values: 10459 Values: 26111 Values: 2 Values: 0 Values: 0 Values: 0 Values: 20000000000 Values: 0 Values: 0 Values: 0 ConsumerAggregatedCounters { Consumer: "consumer-1" Values: 1771489137612 Values: 1771489135367 Values: 3161 Values: 763018 Values: 1771489196959 Values: 10000 Values: 0 Values: 3683 Values: 1 Values: 0 Values: 763018 Values: 0 Values: 0 Values: 0 Values: 867993 Values: 867993 Values: 867993 Values: 867993 Values: 867993 Values: 867993 Values: 867993 Values: 867993 Values: 1000000000 Values: 0 Values: 10000 Values: 1771489196959 Values: 14 Values: 3161 Values: 0 } ExtendedCounters { Values: 89 Values: 763018 Values: 0 } } Generation: 1 Cookie: 58 ScaleStatus: NORMAL } 2026-02-19T08:19:57.264980Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-02-19T08:19:57.264991Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2026-02-19T08:19:57.266341Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:532: [72075186224037894][streamImpl] Send TEvPeriodicTopicStats PathId: 51 Generation: 1 StatsReportRound: 118 DataSize: 17565260 UsedReserveSize: 0 2026-02-19T08:19:57.266391Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186224037894][streamImpl] ProcessPendingStats. PendingUpdates size 55 2026-02-19T08:19:57.484291Z :INFO: [/Root] [/Root] [ab16713b-5090648b-862f2696-cb448318] Closing read session. Close timeout: 0.000000s 2026-02-19T08:19:57.484640Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:origin/feed/streamImpl:46:55:3151:0 -:origin/feed/streamImpl:45:54:3181:0 -:origin/feed/streamImpl:52:53:3132:0 -:origin/feed/streamImpl:51:52:3051:0 -:origin/feed/streamImpl:50:51:3010:0 -:origin/feed/streamImpl:49:50:2666:0 -:origin/feed/streamImpl:30:49:3802:0 -:origin/feed/streamImpl:29:48:3534:0 -:origin/feed/streamImpl:38:47:3645:0 -:origin/feed/streamImpl:37:46:3276:0 -:origin/feed/streamImpl:44:45:3565:0 -:origin/feed/streamImpl:43:44:3166:0 -:origin/feed/streamImpl:39:43:3324:0 -:origin/feed/streamImpl:42:42:3653:0 -:origin/feed/streamImpl:40:41:3649:0 -:origin/feed/streamImpl:41:40:3302:0 -:origin/feed/streamImpl:36:39:3740:0 -:origin/feed/streamImpl:35:38:3350:0 -:origin/feed/streamImpl:34:37:3672:0 -:origin/feed/streamImpl:33:36:3341:0 -:origin/feed/streamImpl:48:35:3053:0 -:origin/feed/streamImpl:47:34:3107:0 -:origin/feed/streamImpl:54:33:3160:0 -:origin/feed/streamImpl:53:32:2751:0 -:origin/feed/streamImpl:22:31:1790:0 -:origin/feed/streamImpl:16:30:1896:0 -:origin/feed/streamImpl:15:29:1737:0 -:origin/feed/streamImpl:28:28:3442:0 -:origin/feed/streamImpl:27:27:3119:0 -:origin/feed/streamImpl:17:26:1826:0 -:origin/feed/streamImpl:23:25:1730:0 -:origin/feed/streamImpl:25:24:1737:0 -:origin/feed/streamImpl:32:22:3087:0 -:origin/feed/streamImpl:31:21:3292:0 -:origin/feed/streamImpl:18:20:1805:0 -:origin/feed/streamImpl:26:18:1769:0 -:origin/feed/streamImpl:20:17:1810:0 -:origin/feed/streamImpl:19:16:1836:0 -:origin/feed/streamImpl:10:15:1711:0 -:origin/feed/streamImpl:9:14:2055:0 -:origin/feed/streamImpl:12:13:2001:0 -:origin/feed/streamImpl:11:12:2286:0 -:origin/feed/streamImpl:14:11:1816:0 -:origin/feed/streamImpl:8:10:1949:0 -:origin/feed/streamImpl:24:19:1826:0 -:origin/feed/streamImpl:13:9:1758:0 -:origin/feed/streamImpl:7:8:1661:0 
-:origin/feed/streamImpl:3:7:2492:0 -:origin/feed/streamImpl:4:6:1938:0 -:origin/feed/streamImpl:21:23:1730:0 -:origin/feed/streamImpl:6:5:1855:0 -:origin/feed/streamImpl:5:4:2039:0 -:origin/feed/streamImpl:2:3:2031:0 -:origin/feed/streamImpl:1:2:2290:0 -:origin/feed/streamImpl:0:1:108350:0 2026-02-19T08:19:57.484694Z :INFO: [/Root] [/Root] [ab16713b-5090648b-862f2696-cb448318] Counters: { Errors: 0 CurrentSessionLifetimeMs: 27295 BytesRead: 46043211 MessagesRead: 250000 BytesReadCompressed: 46043211 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:19:57.484805Z :NOTICE: [/Root] [/Root] [ab16713b-5090648b-862f2696-cb448318] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:19:57.484846Z :DEBUG: [/Root] [/Root] [ab16713b-5090648b-862f2696-cb448318] [] Abort session to cluster 2026-02-19T08:19:57.485371Z :NOTICE: [/Root] [/Root] [ab16713b-5090648b-862f2696-cb448318] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:19:57.490143Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037894][streamImpl] pipe [1:7608488055380469561:4805] disconnected. 2026-02-19T08:19:57.490187Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037894][streamImpl] pipe [1:7608488055380469561:4805] disconnected; active server actors: 1 2026-02-19T08:19:57.490214Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037894][streamImpl] pipe [1:7608488055380469561:4805] client consumer-1 disconnected session consumer-1_1_1_7416075884109549924_v1 >>>>> 2026-02-19T08:19:57.493678Z End |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::OneShardLocalExec+UseSink >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/prefixed_vector/unittest >> KqpPrefixedVectorIndexes::EmptyPrefixedVectorIndexInsertWithOverlap-Covered [GOOD] Test command err: 2026-02-19T08:11:57.826417Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486105489817812:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:11:57.826607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:11:58.206560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:11:58.216816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:11:58.216903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:11:58.269739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:11:58.345838Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486105489817678:2081] 1771488717810868 != 1771488717810871 2026-02-19T08:11:58.350578Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:11:58.431978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:11:58.442459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:11:58.442484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:11:58.442490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:11:58.442576Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration 2026-02-19T08:11:58.835825Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:11:58.900879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:11:58.911890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:11:58.983364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:59.178390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:59.326017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:11:59.414652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.356075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486122669688739:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.356181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.356497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486122669688748:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.356533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:01.737376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.776843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.815848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.854801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.896537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.942042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:01.985922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:02.040445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:12:02.120308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486126964656916:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:02.120526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:02.120896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486126964656921:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:02.120932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486126964656922:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:02.121028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:12:02.125554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:12:02.140057Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486126964656925:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:12:02.228853Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486126964656976:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:12:02.823186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486105489817812:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:12:02.823233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:12:03.785494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608486131259624571:3971], Recipient [1:7608486105489818040:2148]: NKikimr::TE ... X_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037933 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 57] state 'Ready' dataSize 1190 rowCount 30 cpuUsage 0.0175 2026-02-19T08:19:55.474854Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037933 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 57] raw table stats: DataSize: 1190 RowCount: 30 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771489190830 LastUpdateTime: 1771489028145 ImmediateTxCompleted: 2 PlannedTxCompleted: 4 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 30 RowDeletes: 0 RowReads: 0 RangeReads: 35 PartCount: 1 RangeReadRows: 350 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 1190 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:19:55.474880Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.099995s, queue# 1 2026-02-19T08:19:55.475103Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [3:7608487429086064412:2560], Recipient [3:7608487394726323060:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037930 TableLocalId: 58 Generation: 1 Round: 16 TableStats { DataSize: 155 RowCount: 3 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771489190812 LastUpdateTime: 1771489028145 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 3 RowDeletes: 0 RowReads: 0 RangeReads: 38 PartCount: 1 RangeReadRows: 35 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 155 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 1 LocksWholeShard: 0 LocksBroken: 1 } TabletMetrics { CPU: 162 Memory: 133624 Storage: 276 GroupWriteThroughput { GroupID: 2181038080 Channel: 1 Throughput: 51 } GroupWriteThroughput { GroupID: 2181038080 Channel: 0 Throughput: 23 } } ShardState: 2 UserTablePartOwners: 72075186224037930 NodeId: 3 StartTime: 1771489025370 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:19:55.475119Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:19:55.475139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037930 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 58] state 'Ready' dataSize 155 rowCount 3 cpuUsage 0.0162 2026-02-19T08:19:55.475239Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037930 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 58] raw table stats: DataSize: 155 RowCount: 3 IndexSize: 0 InMemSize: 0 LastAccessTime: 1771489190812 LastUpdateTime: 1771489028145 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 3 RowDeletes: 0 RowReads: 0 RangeReads: 38 PartCount: 1 RangeReadRows: 35 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 155 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 1 LocksWholeShard: 0 LocksBroken: 1 2026-02-19T08:19:55.475452Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [3:7608487429086064441:2561], Recipient [3:7608487394726323060:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037931 TableLocalId: 56 Generation: 1 Round: 16 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 18 Memory: 119576 GroupWriteThroughput { GroupID: 2181038080 Channel: 1 Throughput: 43 } GroupWriteThroughput { GroupID: 2181038080 Channel: 0 Throughput: 18 } } ShardState: 2 UserTablePartOwners: 72075186224037931 NodeId: 3 StartTime: 1771489025373 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:19:55.475467Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:19:55.475485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037931 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 56] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0018 2026-02-19T08:19:55.475571Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037931 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 56] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:19:55.580161Z node 3 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:7608487394726323060:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:19:55.580215Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:19:55.580228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 3 2026-02-19T08:19:55.580291Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 3 2026-02-19T08:19:55.580308Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 3 2026-02-19T08:19:55.580386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 57 shard idx 72057594046644480:44 data size 1190 row count 30 2026-02-19T08:19:55.580437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037933 maps to shardIdx: 72057594046644480:44 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 57], pathId map=indexImplPostingTable, is column=0, is olap=0, RowCount 30, DataSize 1190 2026-02-19T08:19:55.580450Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037933, followerId 0 2026-02-19T08:19:55.580529Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:44 with partCount# 1, rowCount# 30, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:19:55.580593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037933: SplitByLoadNotEnabledForTable 2026-02-19T08:19:55.580636Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 58 shard idx 72057594046644480:45 data size 155 row count 3 2026-02-19T08:19:55.580665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037930 maps to shardIdx: 72057594046644480:45 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 58], pathId map=indexImplPrefixTable, is column=0, is olap=0, RowCount 3, DataSize 155 2026-02-19T08:19:55.580675Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037930, followerId 0 2026-02-19T08:19:55.580725Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:45 with partCount# 1, rowCount# 3, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:19:55.580746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037930: SplitByLoadNotEnabledForTable 2026-02-19T08:19:55.580769Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 56 shard idx 72057594046644480:43 data size 0 row count 0 2026-02-19T08:19:55.580792Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from 
datashardId(TabletID)=72075186224037931 maps to shardIdx: 72057594046644480:43 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 56], pathId map=indexImplLevelTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:19:55.580801Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037931, followerId 0 2026-02-19T08:19:55.580834Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:43 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:19:55.580851Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037931: SplitByLoadNotEnabledForTable 2026-02-19T08:19:55.580909Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:19:55.581352Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:7608487394726323060:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:19:55.581371Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:19:55.581382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-02-19T08:19:55.849891Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:7608487394726323060:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:19:55.849943Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:19:55.849992Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [3:7608487394726323060:2154], Recipient [3:7608487394726323060:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:19:55.850020Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |91.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection >> TBackupCollectionTests::BackupWithIndexes [GOOD] >> TBackupCollectionTests::BackupWithIndexesOmit |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 11744, msgbus: 6023 2026-02-19T08:14:03.604502Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486647289914924:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:03.604625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:03.858156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:03.884563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:03.884683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:03.917003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:03.924192Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:04.058207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:04.058228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:04.058238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:04.058347Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:04.095770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:04.288511Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486647289915132:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:04.288570Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486651584883205:2732] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:04.288989Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486651584883205:2732] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:04.331813Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486651584883205:2732] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:14:04.345669Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486651584883205:2732] Handle TEvDescribeSchemeResult Forward to# [1:7608486651584883204:2731] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 
PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:04.351792Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486647289915132:2143] Handle TEvProposeTransaction 2026-02-19T08:14:04.351820Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486647289915132:2143] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:14:04.351878Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486647289915132:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7608486651584883210:2737] 2026-02-19T08:14:04.492949Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486651584883210:2737] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:04.493052Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486651584883210:2737] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:14:04.493074Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486651584883210:2737] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:04.493161Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486651584883210:2737] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:04.493522Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486651584883210:2737] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:04.493628Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486651584883210:2737] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:14:04.494040Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486651584883210:2737] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:14:04.494212Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486651584883210:2737] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:14:04.495039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:04.504752Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486651584883210:2737] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:14:04.504814Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486651584883210:2737] txid# 281474976715657 SEND to# [1:7608486651584883209:2736] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:14:04.525740Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486647289915132:2143] Handle TEvProposeTransaction 2026-02-19T08:14:04.525770Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486647289915132:2143] TxId# 281474976715658 ProcessProposeTransaction 2026-02-19T08:14:04.525799Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486647289915132:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7608486651584883249:2773] 2026-02-19T08:14:04.527724Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486651584883249:2773] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:04.527770Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486651584883249:2773] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:14:04.527792Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486651584883249:2773] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:04.527874Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486651584883249:2773] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:04.528178Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486651584883249:2773] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:04.528281Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486651584883249:2773] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:14:04.528341Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486651584883249:2773] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-02-19T08:14:04.528472Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486651584883249:2773] txid# 281474976715658 HANDLE 
EvClientConnected 2026-02-19T08:14:04.529723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:14:04.538474Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486651584883249:2773] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-02-19T08:14:04.538525Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486651584883249:2773] txid# 281474976715658 SEND to# [1:7608486651584883248:2772] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-02-19T08:14:04.618208Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:06.788287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486660174817987:2334], DatabaseId: /dc-1, PoolId: default, Failed t ... ImV4cCI6MTc3MTUzMjM4OSwiaWF0IjoxNzcxNDg5MTg5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:42020" 2026-02-19T08:19:50.832801Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488139590455741:3037] txid# 281474976715666 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:50.832823Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488139590455741:3037] txid# 281474976715666 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:19:50.832980Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488139590455741:3037] txid# 281474976715666 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:50.833062Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488139590455741:3037] txid# 281474976715666 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:50.833121Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488139590455741:3037] txid# 281474976715666 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:50.833414Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488139590455741:3037] txid# 281474976715666 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:50.833454Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488139590455741:3037] txid# 281474976715666, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:19:50.833556Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488139590455741:3037] txid# 281474976715666, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:19:50.833593Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488139590455741:3037] txid# 281474976715666 SEND to# [59:7608488139590455740:2362] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:50.834832Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=Mjk0MjY2ZjMtN2M2NjdmN2UtNjJhM2U1NTQtMzY0NmZlNg==, ActorId: [59:7608488135295488404:2362], ActorState: 
ExecuteState, LegacyTraceId: 01khtfqrw8drmpb3zehhj8qmdj, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:50.837014Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488105230715977:2143] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:50.837046Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488105230715977:2143] TxId# 281474976715667 ProcessProposeKqpTransaction 2026-02-19T08:19:54.814343Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488105230715977:2143] Handle TEvProposeTransaction 2026-02-19T08:19:54.814386Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488105230715977:2143] TxId# 281474976715668 ProcessProposeTransaction 2026-02-19T08:19:54.814435Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488105230715977:2143] Cookie# 0 userReqId# "" txid# 281474976715668 SEND to# [59:7608488156770324998:3070] 2026-02-19T08:19:54.816633Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488156770324998:3070] txid# 281474976715668 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM4OSwiaWF0IjoxNzcxNDg5MTg5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.zkvIZ4OEkTcnLlJLU7pZS3TnGZ9QRFQKLeeQffpFWcm-mTiSJ4GsXpWHT-YOMC97Lihnp4Dkb3aiU2awuTNJzomfzqNC5Cndq-lC9YGB_JVh-R1kSfyp1WobIfP9OZ_9CzAz8v5Vdv9RaqzJvMS2TtVrzkxOM9moqLo5EzugLmLh_-m_5sY3RPpOAtq9Y7f2pcYwXktuyL6Of9HnaU6rNH6Hgnhoro9YIoSKvSqCbWW5fDGZD8sHE6BNqmySfuoqC_K-U-d7AQeuZidf6cOdjcjl9nbBwU7tShvCycpOOGG85Ulbv2r8OGoqeXbMm_4yJGXRvuVbWDLd_3oIIBYlrw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM4OSwiaWF0IjoxNzcxNDg5MTg5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:55718" 2026-02-19T08:19:54.816709Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488156770324998:3070] txid# 281474976715668 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:19:54.816731Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488156770324998:3070] txid# 281474976715668 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:19:54.816922Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488156770324998:3070] txid# 281474976715668 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:19:54.816988Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488156770324998:3070] txid# 281474976715668 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:19:54.817039Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488156770324998:3070] txid# 281474976715668 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:54.817319Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488156770324998:3070] txid# 281474976715668 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 
2026-02-19T08:19:54.817346Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488156770324998:3070] txid# 281474976715668, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:19:54.817436Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488156770324998:3070] txid# 281474976715668, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:19:54.817463Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488156770324998:3070] txid# 281474976715668 SEND to# [59:7608488156770324997:2362] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:19:54.817939Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=Mjk0MjY2ZjMtN2M2NjdmN2UtNjJhM2U1NTQtMzY0NmZlNg==, ActorId: [59:7608488135295488404:2362], ActorState: ExecuteState, LegacyTraceId: 01khtfqws40awcrcdrhfz9dm3j, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:19:54.825374Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488105230715977:2143] Handle TEvExecuteKqpTransaction 2026-02-19T08:19:54.825414Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488105230715977:2143] TxId# 281474976715669 ProcessProposeKqpTransaction 2026-02-19T08:19:57.637862Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:19:57.637894Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:01.546148Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488105230715977:2143] Handle TEvProposeTransaction 2026-02-19T08:20:01.546186Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488105230715977:2143] TxId# 281474976715670 ProcessProposeTransaction 2026-02-19T08:20:01.546245Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488105230715977:2143] Cookie# 0 userReqId# "" txid# 281474976715670 SEND to# [59:7608488186835096177:3116] 2026-02-19T08:20:01.548887Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488186835096177:3116] txid# 281474976715670 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM4OSwiaWF0IjoxNzcxNDg5MTg5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.zkvIZ4OEkTcnLlJLU7pZS3TnGZ9QRFQKLeeQffpFWcm-mTiSJ4GsXpWHT-YOMC97Lihnp4Dkb3aiU2awuTNJzomfzqNC5Cndq-lC9YGB_JVh-R1kSfyp1WobIfP9OZ_9CzAz8v5Vdv9RaqzJvMS2TtVrzkxOM9moqLo5EzugLmLh_-m_5sY3RPpOAtq9Y7f2pcYwXktuyL6Of9HnaU6rNH6Hgnhoro9YIoSKvSqCbWW5fDGZD8sHE6BNqmySfuoqC_K-U-d7AQeuZidf6cOdjcjl9nbBwU7tShvCycpOOGG85Ulbv2r8OGoqeXbMm_4yJGXRvuVbWDLd_3oIIBYlrw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjM4OSwiaWF0IjoxNzcxNDg5MTg5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:55718" 2026-02-19T08:20:01.548969Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488186835096177:3116] txid# 281474976715670 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
2026-02-19T08:20:01.548990Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488186835096177:3116] txid# 281474976715670 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:20:01.549164Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488186835096177:3116] txid# 281474976715670 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:20:01.549236Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488186835096177:3116] txid# 281474976715670 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:20:01.549285Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488186835096177:3116] txid# 281474976715670 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:01.549659Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488186835096177:3116] txid# 281474976715670 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:01.549687Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488186835096177:3116] txid# 281474976715670, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:20:01.549802Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488186835096177:3116] txid# 281474976715670, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:20:01.549831Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488186835096177:3116] txid# 281474976715670 SEND to# [59:7608488186835096176:2362] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:01.550355Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=Mjk0MjY2ZjMtN2M2NjdmN2UtNjJhM2U1NTQtMzY0NmZlNg==, ActorId: [59:7608488135295488404:2362], ActorState: ExecuteState, LegacyTraceId: 01khtfr3bhbed826gy96cnhyqa, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:20:01.556868Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488105230715977:2143] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:01.556907Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488105230715977:2143] TxId# 281474976715671 ProcessProposeKqpTransaction |91.9%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> IndexBuildTest::NullsAreUniq [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 >> KqpExplain::UpdateOnSecondary+UseSink-UseStreamIndex [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink+UseStreamIndex [GOOD] >> KqpExplain::UpdateOnSecondary+UseSink+UseStreamIndex >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> KqpStats::DataQueryMulti >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> TableCreator::CreateTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::NullsAreUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:37.129001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:37.129085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:37.129142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:37.129195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:37.129235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:37.129262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:37.129286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:37.129310Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:37.129362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:37.129462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:37.130242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:37.130556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:37.222599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:37.222653Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:37.234819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:37.235518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:37.235730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:37.246745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:37.247338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:37.247887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:37.248134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:37.250952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:37.251160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:37.252408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:37.252474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:37.252607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:37.252655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:37.252702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:37.252900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:18:37.397148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.401269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.401396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.401443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.401571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.401622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.401702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.401815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.401922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.402004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.402070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.402149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.402239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:37.402336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2026-02-19T08:20:06.108176Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 2/2, is published: true 2026-02-19T08:20:06.108241Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:1019:2956] message: TxId: 281474976725761 2026-02-19T08:20:06.108288Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2026-02-19T08:20:06.108325Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2026-02-19T08:20:06.108358Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976725761:0 2026-02-19T08:20:06.108427Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2026-02-19T08:20:06.108459Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:1 2026-02-19T08:20:06.108480Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976725761:1 2026-02-19T08:20:06.108514Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 34] was 3 2026-02-19T08:20:06.115325Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-02-19T08:20:06.115432Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976725761 2026-02-19T08:20:06.115505Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 107, txId# 281474976725761 2026-02-19T08:20:06.115658Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1675:3542], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0}, txId# 281474976725761 
2026-02-19T08:20:06.117751Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking 2026-02-19T08:20:06.117903Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1675:3542], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2026-02-19T08:20:06.117970Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:20:06.119888Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done 2026-02-19T08:20:06.120034Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Done, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1675:3542], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2026-02-19T08:20:06.120081Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-02-19T08:20:06.120211Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:20:06.120264Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [5:1692:3559] TestWaitNotification: OK eventTxId 107 2026-02-19T08:20:06.121132Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-02-19T08:20:06.121423Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2026-02-19T08:20:06.122418Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-02-19T08:20:06.122696Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 310us result status StatusSuccess 2026-02-19T08:20:06.123262Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "test_index" LocalPathId: 33 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "index1" KeyColumnNames: "index2" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TBackupCollectionTests::BackupWithIndexesOmit [GOOD] >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService >> TBackupCollectionTests::BackupWithIndexesDefault >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryReplySize >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit |91.9%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> TProxyActorTest::TestCreateSemaphoreInterrupted |91.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> TTransferTests::Alter [GOOD] >> KqpExplain::CompoundKeyRange [GOOD] >> KqpExplain::ExplainDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] Test command err: 2026-02-19T08:19:29.636089Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488047427998584:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:29.636194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:29.981726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:29.981843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:30.015353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:30.034317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:30.037292Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:30.039134Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488047427998429:2081] 1771489169626674 != 1771489169626677 2026-02-19T08:19:30.226395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:30.226424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:30.226432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:30.226502Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:30.293688Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:30.601986Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:30.852142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:30.865031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:19:33.182492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488064607868521:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:33.182492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488064607868494:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:33.182569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:33.183005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488064607868524:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:33.183066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:33.186155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:33.196204Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488064607868523:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:19:33.346015Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488064607868576:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:33.669967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:34.353398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2026-02-19T08:19:34.377885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:34.637244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488047427998584:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:34.638291Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:34.674033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:19:34.688906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:19:34.696720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2026-02-19T08:19:35.656657Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488072809313702:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:35.659311Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:35.674644Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:35.821149Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:35.821299Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:35.830386Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488072809313645:2081] 1771489175646979 != 1771489175646982 2026-02-19T08:19:35.839041Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:35.841854Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:35.855743Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:35.916239Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:35.916262Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:35.916269Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:35.916345Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:36.050668Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:36.304407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:36.311054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:36.665833Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:38.986149Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488085694216426:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:38.986206Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488085694216406:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:38.986329Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: ... EvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-02-19T08:20:00.828814Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037902 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-02-19T08:20:00.828818Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037904 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-02-19T08:20:00.828871Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037898 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-02-19T08:20:00.828882Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037900 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-02-19T08:20:00.828925Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-02-19T08:20:00.828936Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037896 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-02-19T08:20:00.828992Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037892 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-02-19T08:20:00.835366Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:00.845055Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710667, at schemeshard: 72057594046644480 2026-02-19T08:20:00.846806Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:02.297476Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608488190649312338:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:02.297532Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:02.318806Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:02.404576Z 
node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:02.409152Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608488190649312306:2081] 1771489202296430 != 1771489202296433 2026-02-19T08:20:02.443877Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:02.443945Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:02.448397Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:02.539599Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:02.554297Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:02.554322Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:02.554330Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:02.554405Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:03.309910Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:03.426625Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:03.438527Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:20:03.455430Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:20:03.549927Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-02-19T08:20:03.576451Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:20:06.801425Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488207829182466:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:06.801529Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:06.802063Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488207829182475:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:06.802126Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:06.808716Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488207829182480:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:06.826080Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:06.845823Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488207829182482:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-02-19T08:20:06.925217Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488207829182533:2601] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:06.965596Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:07.271284Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-02-19T08:20:07.295064Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:07.300084Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608488190649312338:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:07.300143Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:07.593491Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:07.604853Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:07.621182Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:07.629454Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715672, at schemeshard: 72057594046644480 >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] 
sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:19:55.019903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:19:55.020005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:55.020095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:19:55.020151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:19:55.020204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:19:55.020257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:19:55.020286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:19:55.020324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:19:55.020413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:55.020487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:19:55.021360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:19:55.021660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:19:55.113208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:19:55.113266Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:55.127671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:19:55.128337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:19:55.128553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:19:55.141394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:19:55.150751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:19:55.151699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2026-02-19T08:19:55.152170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:19:55.162161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:55.162451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:19:55.163824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:19:55.163902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:55.164022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:19:55.164070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:19:55.164171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:19:55.164405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:19:55.345674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.355923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.356011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.356079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.356145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:55.356237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 717:2701], Recipient [6:134:2156]: NKikimrTxColumnShard.TEvNotifyTxCompletionResult Origin: 72075186233409546 TxId: 101 2026-02-19T08:20:07.818285Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5341: StateWork, processing event TEvColumnShard::TEvNotifyTxCompletionResult 2026-02-19T08:20:07.818358Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6670: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-02-19T08:20:07.818408Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:20:07.818594Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-02-19T08:20:07.818787Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:20:07.823536Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:20:07.823606Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:20:07.823655Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 101:0 2026-02-19T08:20:07.823811Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [6:134:2156], Recipient [6:134:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:20:07.823850Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:20:07.823917Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:20:07.823988Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:20:07.824130Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 
72057594046678944 2026-02-19T08:20:07.824169Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:20:07.824208Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:20:07.824267Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:20:07.824307Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:20:07.824351Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:20:07.824436Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:750:2725] message: TxId: 101 2026-02-19T08:20:07.824495Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:20:07.824544Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:20:07.824584Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:20:07.824768Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:20:07.831846Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:20:07.831974Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [6:750:2725] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 101 at schemeshard: 72057594046678944 2026-02-19T08:20:07.832186Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:20:07.832253Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:751:2726] 2026-02-19T08:20:07.832473Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [6:753:2728], Recipient [6:134:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:20:07.832517Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:20:07.832572Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-02-19T08:20:07.833288Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [6:794:2761], Recipient [6:134:2156]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944} 2026-02-19T08:20:07.833342Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:20:07.835370Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:20:07.835724Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_replication.cpp:361: [72057594046678944] TCreateReplication Propose: opId# 102:0, path# /MyRoot/Transfer 2026-02-19T08:20:07.840136Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, at schemeshard: 72057594046678944 2026-02-19T08:20:07.840485Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:20:07.843048Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:07.843377Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2026-02-19T08:20:07.843444Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:20:07.843787Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:20:07.843845Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:20:07.844264Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [6:800:2767], Recipient [6:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:20:07.844320Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:20:07.844359Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046678944 2026-02-19T08:20:07.844455Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [6:750:2725], Recipient [6:134:2156]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2026-02-19T08:20:07.844490Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:20:07.844569Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:20:07.844681Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:20:07.844724Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:798:2765] 2026-02-19T08:20:07.844906Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, 
received event# 269877764, Sender [6:800:2767], Recipient [6:134:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:20:07.844943Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:20:07.844987Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-02-19T08:20:07.845317Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [6:801:2768], Recipient [6:134:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-02-19T08:20:07.845371Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:20:07.845469Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:07.845667Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 199us result status StatusPathDoesNotExist 2026-02-19T08:20:07.845894Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:19:54.337638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:19:54.337752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:54.337813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:19:54.337852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:19:54.337903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:19:54.337973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:19:54.338004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:19:54.338039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:19:54.338122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:54.338182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:19:54.339014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:19:54.339331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:19:54.436052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:19:54.436109Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:54.479860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:19:54.487414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:19:54.487650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:19:54.524360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:19:54.525102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:19:54.525852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:54.526208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:19:54.530093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:54.530322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:19:54.531561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:19:54.531620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:54.531723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:19:54.531782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:19:54.531894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:19:54.532102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:19:54.696031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.697684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.716033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.716187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:19:54.716264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:54.716347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... ressOperation 2026-02-19T08:20:09.690683Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:20:09.690752Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:196: [72057594046678944] TAlterReplication TPropose opId# 104:0 ProgressState 2026-02-19T08:20:09.690816Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:20:09.690877Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2026-02-19T08:20:09.691095Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:20:09.692556Z node 6 :TX_PROXY INFO: describe.cpp:354: Actor# [6:956:2898] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-02-19T08:20:09.694014Z node 6 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2026-02-19T08:20:09.694102Z node 6 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2026-02-19T08:20:09.694441Z node 6 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186233409547] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2026-02-19T08:20:09.694611Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409547][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2026-02-19T08:20:09.694697Z node 6 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409547][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2026-02-19T08:20:09.695712Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at 
tablet# 72057594046678944 2026-02-19T08:20:09.695776Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2026-02-19T08:20:09.695908Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-02-19T08:20:09.696316Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269287424, Sender [6:137:2159], Recipient [6:272:2270] 2026-02-19T08:20:09.696381Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5318: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:20:09.696498Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:09.696667Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 25769805935 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:09.696750Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:210: [72057594046678944] TAlterReplication TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-02-19T08:20:09.696962Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:20:09.697255Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:20:09.697353Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:20:09.697448Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:706: Ack tablet strongly msg opId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 2026-02-19T08:20:09.718570Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409547][TxDiscoveryTargetsResult] Complete 2026-02-19T08:20:09.719315Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:20:09.719397Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:394: Ack coordinator stepId#5000041 first txId#104 countTxs#1 2026-02-19T08:20:09.719471Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:364: Ack mediator stepId#5000041 2026-02-19T08:20:09.719527Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2026-02-19T08:20:09.719788Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [6:127:2152], Recipient [6:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:20:09.719836Z node 6 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:20:09.719939Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:09.720000Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:20:09.720336Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:09.720397Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:210:2210], at schemeshard: 72057594046678944, txId: 104, path id: 39 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:20:09.720923Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:20:09.720988Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:20:09.721153Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:20:09.721208Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:20:09.721256Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:20:09.721315Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:20:09.721365Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:20:09.721424Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-02-19T08:20:09.721499Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:20:09.721562Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:20:09.721611Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:20:09.730146Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:20:09.730290Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2026-02-19T08:20:09.730359Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-02-19T08:20:09.731475Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274137603, Sender [6:210:2210], Recipient [6:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Version: 4 } 
2026-02-19T08:20:09.731543Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5383: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-02-19T08:20:09.731678Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:20:09.731840Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:20:09.731905Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:20:09.731973Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:20:09.732039Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:20:09.732179Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-02-19T08:20:09.732235Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:20:09.744156Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:20:09.744927Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:20:09.745003Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifo0Consumers [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpQuery::OlapTemporary [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> KqpStats::OneShardLocalExec+UseSink [GOOD] >> KqpStats::OneShardLocalExec-UseSink >> TProxyActorTest::TestAttachSession >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] >> TTicketParserTest::LoginBad >> TBackupCollectionTests::BackupWithIndexesDefault [GOOD] >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups |91.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |91.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TProxyActorTest::TestAttachSession [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 14429, msgbus: 28422 2026-02-19T08:14:26.709905Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486748841969237:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:26.714820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:26.950844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:26.986174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:26.986300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:27.004959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:27.065840Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:27.154806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:27.154848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:27.154855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:27.154938Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:27.199083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:27.370340Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486748841969439:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:27.370386Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486753136937519:2740] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:27.370746Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486753136937519:2740] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:27.391776Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486753136937519:2740] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:14:27.397056Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486753136937519:2740] Handle TEvDescribeSchemeResult Forward to# [1:7608486753136937518:2739] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:27.399144Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486748841969439:2143] Handle TEvProposeTransaction 2026-02-19T08:14:27.399167Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486748841969439:2143] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:14:27.399222Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486748841969439:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7608486753136937524:2745] 2026-02-19T08:14:27.485920Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486753136937524:2745] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:27.485997Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486753136937524:2745] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:27.486015Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486753136937524:2745] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:27.486075Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486753136937524:2745] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:27.486494Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486753136937524:2745] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:27.486703Z node 1 :TX_PROXY DEBUG: 
schemereq.cpp:1676: Actor# [1:7608486753136937524:2745] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:14:27.486849Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486753136937524:2745] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:14:27.486997Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486753136937524:2745] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:14:27.487831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:27.490991Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486753136937524:2745] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:14:27.491082Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486753136937524:2745] txid# 281474976715657 SEND to# [1:7608486753136937523:2744] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:14:27.504090Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486748841969439:2143] Handle TEvProposeTransaction 2026-02-19T08:14:27.504118Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486748841969439:2143] TxId# 281474976715658 ProcessProposeTransaction 2026-02-19T08:14:27.504147Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486748841969439:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7608486753136937563:2781] 2026-02-19T08:14:27.506014Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486753136937563:2781] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:27.506075Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486753136937563:2781] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:27.506115Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486753136937563:2781] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:27.506182Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486753136937563:2781] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:27.506488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486753136937563:2781] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:27.506620Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486753136937563:2781] HANDLE EvNavigateKeySetResult, txid# 281474976715658 
shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:14:27.506691Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486753136937563:2781] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-02-19T08:14:27.506829Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486753136937563:2781] txid# 281474976715658 HANDLE EvClientConnected 2026-02-19T08:14:27.507337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:14:27.511613Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486753136937563:2781] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-02-19T08:14:27.511659Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486753136937563:2781] txid# 281474976715658 SEND to# [1:7608486753136937562:2780] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-02-19T08:14:27.557210Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486748841969439:2143] Handle TEvProposeTransaction 2026-02-19T08:14:27.557284Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486748841969439:2143] TxId# 281474976715659 ProcessProposeTransaction 2026-02-19T08:14:27.557351Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [ ... ErrorCount# 0 2026-02-19T08:20:01.261489Z node 59 :TX_PROXY ERROR: schemereq.cpp:1238: Actor# [59:7608488186636963961:3018] txid# 281474976715663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2026-02-19T08:20:01.261579Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488186636963961:3018] txid# 281474976715663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2026-02-19T08:20:01.261604Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488186636963961:3018] txid# 281474976715663 SEND to# [59:7608488186636963960:2353] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:01.262087Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=MTczZTBlNDEtNzA4ZTJlNzgtNzViYzcxMGQtZTJiYjcwYjY=, ActorId: [59:7608488186636963946:2353], ActorState: ExecuteState, LegacyTraceId: 01khtfr3278rfs12552hdh3jek, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:20:01.262618Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488152277224225:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:01.262643Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488152277224225:2116] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-02-19T08:20:02.881516Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488152277224225:2116] Handle TEvProposeTransaction 2026-02-19T08:20:02.881547Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488152277224225:2116] TxId# 281474976715665 ProcessProposeTransaction 2026-02-19T08:20:02.881585Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488152277224225:2116] Cookie# 0 userReqId# "" txid# 281474976715665 SEND to# [59:7608488190931931293:3031] 2026-02-19T08:20:02.883608Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488190931931293:3031] txid# 281474976715665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49236" 2026-02-19T08:20:02.883698Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488190931931293:3031] txid# 281474976715665 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:20:02.883721Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488190931931293:3031] txid# 281474976715665 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2026-02-19T08:20:02.883773Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488190931931293:3031] txid# 281474976715665 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:02.884120Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488190931931293:3031] txid# 281474976715665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:02.884168Z node 59 :TX_PROXY ERROR: schemereq.cpp:1238: Actor# [59:7608488190931931293:3031] txid# 281474976715665, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2026-02-19T08:20:02.884260Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488190931931293:3031] txid# 281474976715665, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2026-02-19T08:20:02.884288Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488190931931293:3031] txid# 281474976715665 SEND to# [59:7608488190931931292:2353] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:02.885066Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=MTczZTBlNDEtNzA4ZTJlNzgtNzViYzcxMGQtZTJiYjcwYjY=, ActorId: [59:7608488186636963946:2353], ActorState: ExecuteState, LegacyTraceId: 01khtfr4nf5tet8hewptypzn7f, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:20:02.885563Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488152277224225:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:02.885585Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488152277224225:2116] TxId# 281474976715666 ProcessProposeKqpTransaction 2026-02-19T08:20:05.251837Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488152277224225:2116] Handle TEvProposeTransaction 2026-02-19T08:20:05.251869Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488152277224225:2116] TxId# 281474976715667 ProcessProposeTransaction 2026-02-19T08:20:05.251912Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488152277224225:2116] Cookie# 0 userReqId# "" txid# 281474976715667 SEND to# [59:7608488203816833229:3055] 2026-02-19T08:20:05.253805Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488203816833229:3055] txid# 281474976715667 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49236" 2026-02-19T08:20:05.253880Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488203816833229:3055] txid# 281474976715667 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:20:05.253900Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488203816833229:3055] txid# 281474976715667 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2026-02-19T08:20:05.253953Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488203816833229:3055] txid# 281474976715667 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:05.254259Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488203816833229:3055] txid# 281474976715667 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:05.254307Z node 59 :TX_PROXY ERROR: schemereq.cpp:1238: Actor# [59:7608488203816833229:3055] txid# 281474976715667, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2026-02-19T08:20:05.254394Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488203816833229:3055] txid# 281474976715667, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2026-02-19T08:20:05.254421Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488203816833229:3055] txid# 281474976715667 SEND to# [59:7608488203816833228:2353] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:05.255120Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=MTczZTBlNDEtNzA4ZTJlNzgtNzViYzcxMGQtZTJiYjcwYjY=, ActorId: [59:7608488186636963946:2353], ActorState: ExecuteState, LegacyTraceId: 01khtfr6zb4038n33fe73g2ren, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:20:05.255577Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488152277224225:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:05.255597Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488152277224225:2116] TxId# 281474976715668 ProcessProposeKqpTransaction 2026-02-19T08:20:08.914081Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:20:08.914118Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:09.893825Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488152277224225:2116] Handle TEvProposeTransaction 2026-02-19T08:20:09.893863Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488152277224225:2116] TxId# 281474976715669 ProcessProposeTransaction 2026-02-19T08:20:09.893915Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488152277224225:2116] Cookie# 0 userReqId# "" txid# 281474976715669 SEND to# [59:7608488220996702489:3087] 2026-02-19T08:20:09.895888Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488220996702489:3087] txid# 281474976715669 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:43046" 2026-02-19T08:20:09.895948Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488220996702489:3087] txid# 281474976715669 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:20:09.895966Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488220996702489:3087] txid# 281474976715669 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2026-02-19T08:20:09.896016Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488220996702489:3087] txid# 281474976715669 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:09.896329Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488220996702489:3087] txid# 281474976715669 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:09.896378Z node 59 :TX_PROXY ERROR: schemereq.cpp:1238: Actor# [59:7608488220996702489:3087] txid# 281474976715669, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2026-02-19T08:20:09.896466Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488220996702489:3087] txid# 281474976715669, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2026-02-19T08:20:09.896496Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488220996702489:3087] txid# 281474976715669 SEND to# [59:7608488220996702488:2353] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:09.898120Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=MTczZTBlNDEtNzA4ZTJlNzgtNzViYzcxMGQtZTJiYjcwYjY=, ActorId: [59:7608488186636963946:2353], ActorState: ExecuteState, LegacyTraceId: 01khtfrbg610g7x391wfcswftc, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme 
request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:20:09.902004Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488152277224225:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:09.902039Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488152277224225:2116] TxId# 281474976715670 ProcessProposeKqpTransaction |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TableCreator::CreateTables [GOOD] >> TProxyActorTest::TestCreateSemaphore |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService [GOOD] >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |91.9%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2026-02-19T08:19:20.158461Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488010769443407:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:20.158543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:20.444916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:20.501925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:20.502039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:20.554152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:20.565321Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:20.713353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:20.713387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:20.713398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:20.713482Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:20.715568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:20.942347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) Triggering split by load 2026-02-19T08:19:21.209688Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:23.962378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488023654346606:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.962539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.962984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488023654346619:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.963029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.336113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:24.621624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314078:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.621729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.622032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314081:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.622071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.664601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-02-19T08:19:24.840871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314200:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.841023Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.841558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314215:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.841612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314217:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.841642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314218:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.841690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314216:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.841908Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.841547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314214:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.888637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314239:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.888688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314242:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.889033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314244:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.889234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.890808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2026-02-19T08:19:24.891024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:24.891050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715660:1, at schemeshard: 72057594046644480 2026-02-19T08:19:24.893207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314282:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.893262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488027949314288:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.893335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.891161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:24.894402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976715660:2, at schemeshard: 72057594046644480 2026-02-19T08:19:24.894620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:24.894720Z node 1 ... _operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-02-19T08:19:44.711773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 2026-02-19T08:19:44.714990Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:19:44.716674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:19:44.731448Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:19:44.734405Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:19:44.734529Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:19:44.734646Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 Table has 2 shards Loading new shards 2026-02-19T08:19:59.632117Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.0744 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037889 table: [/Root/Foo] 2026-02-19T08:20:04.638039Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.0744 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037889 table: [/Root/Foo] Table has 2 shards Fast forward > 10h to trigger the merge 2026-02-19T08:20:09.746689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:580: Propose merge request: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976710694 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037890 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2026-02-19T08:20:09.746914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710694:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2026-02-19T08:20:09.747603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976710694:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\356\200\207z\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\356\200\207z\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037890 ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 4 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2026-02-19T08:20:09.747639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710694:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:20:09.755016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976710694:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-02-19T08:20:09.767927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976710694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:20:09.768038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710694:0 CreateParts -> ConfigureParts 2026-02-19T08:20:09.777563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710694:0, at schemeshard: 72057594046644480 2026-02-19T08:20:09.785683Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7608488221222926729:10795] 2026-02-19T08:20:09.814663Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-02-19T08:20:09.814801Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-02-19T08:20:09.815030Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-02-19T08:20:09.828089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710694:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710694 TabletId: 72075186224037891 2026-02-19T08:20:09.828144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710694:0 ConfigureParts -> TransferData 2026-02-19T08:20:09.830909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:20:09.864312Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-02-19T08:20:09.864428Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:20:09.864472Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037891 
2026-02-19T08:20:09.864501Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037891 2026-02-19T08:20:09.864760Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-02-19T08:20:09.871620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037889 2026-02-19T08:20:09.871977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037890 2026-02-19T08:20:09.872286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710694:0 TransferData -> NotifyPartitioningChanged 2026-02-19T08:20:09.874669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:20:09.874979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:20:09.875039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:20:09.876965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710694 2026-02-19T08:20:09.877007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710694 2026-02-19T08:20:09.877027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-02-19T08:20:09.893203Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-02-19T08:20:09.895547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-02-19T08:20:09.899624Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-02-19T08:20:09.901388Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:20:09.901896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-02-19T08:20:09.901986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-02-19T08:20:09.902010Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2026-02-19T08:20:09.902058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710694:0 2026-02-19T08:20:09.905600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710694:0 2026-02-19T08:20:09.905906Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:20:09.906365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:09.909487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:09.912799Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-02-19T08:20:09.913417Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-02-19T08:20:09.913654Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-02-19T08:20:09.913761Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 2026-02-19T08:20:09.929798Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-02-19T08:20:09.930248Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-02-19T08:20:09.931599Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-02-19T08:20:09.931696Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 >> TProxyActorTest::TestDisconnectWhileAttaching >> TProxyActorTest::TestCreateSemaphore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifo0Consumers [GOOD] Test command err: 2026-02-19T08:18:12.493880Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487718801919664:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:12.574458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:18:12.615768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:18:12.937376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:12.984616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:12.984711Z node 
1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:13.113546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:13.194930Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:13.197984Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487718801919615:2081] 1771489092480595 != 1771489092480598 2026-02-19T08:18:13.216360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:13.366528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:13.366550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:18:13.366567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:13.366648Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:13.566864Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:13.815081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:13.829033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:18:14.051041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:18:14.067083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:18:14.071826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:14.101664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:18:14.191930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.386165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.446852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-02-19T08:18:14.455719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.566071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-02-19T08:18:14.577061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.643933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.684396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.716599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.761354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.804768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:14.842300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.149407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487740276757763:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.149483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487740276757771:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.149558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.153840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487740276757778:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.153981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:17.156326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:17.183073Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487740276757777:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:18:17.255959Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487740276757831:3108] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:17.485123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487718801919664:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:17.485236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:17.864311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:17.926307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operat ... 9T08:20:12.388430Z node 7 :PERSQUEUE DEBUG: partition.cpp:1439: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1771489212430, TxId 281474976710689 2026-02-19T08:20:12.388451Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:20:12.388464Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:20:12.388474Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:20:12.388496Z node 7 :PERSQUEUE DEBUG: partition.cpp:2509: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-02-19T08:20:12.388646Z node 7 :PERSQUEUE DEBUG: partition.cpp:3984: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976710689 2026-02-19T08:20:12.388663Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:20:12.388676Z node 7 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:20:12.388689Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:20:12.388910Z node 7 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-02-19T08:20:12.394017Z node 7 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:20:12.394243Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:20:12.394355Z node 7 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:20:12.394379Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:20:12.394392Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:12.394404Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:20:12.394419Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:12.394431Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:20:12.394461Z node 7 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:20:12.394818Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 36000 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ExampleQueueName" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/ExampleQueueName" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "regular-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_STREAMING } MonitoringProjectId: "" 2026-02-19T08:20:12.394883Z node 7 :PERSQUEUE NOTICE: pq_impl.cpp:948: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:23589 { status: SUCCESS, issues: }regular-consumer 2026-02-19T08:20:12.432964Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:52114) incoming connection opened 2026-02-19T08:20:12.433031Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:52114) -> (POST /Root, 117 bytes) 2026-02-19T08:20:12.433164Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f814:68a5:497c:0:e014:68a5:497c:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: 60a56aef-c4c0206a-706bddcd-d78792ce 2026-02-19T08:20:12.433535Z node 7 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [60a56aef-c4c0206a-706bddcd-d78792ce] got new request from [f814:68a5:497c:0:e014:68a5:497c:0] database '/Root' stream '' 2026-02-19T08:20:12.434096Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: 
Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-02-19T08:20:12.434173Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-02-19T08:20:12.434211Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-02-19T08:20:12.434245Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-02-19T08:20:12.434279Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-02-19T08:20:12.434313Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-02-19T08:20:12.434348Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-02-19T08:20:12.452904Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-02-19T08:20:12.453011Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-02-19T08:20:12.453052Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-02-19T08:20:12.453084Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-02-19T08:20:12.453113Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-02-19T08:20:12.453144Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-02-19T08:20:12.453173Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-02-19T08:20:12.453266Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-02-19T08:20:12.453798Z node 7 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [60a56aef-c4c0206a-706bddcd-d78792ce] [auth] Authorized successfully 2026-02-19T08:20:12.453867Z node 7 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [60a56aef-c4c0206a-706bddcd-d78792ce] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:20:12.456312Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [GetQueueAttributes] requestId [60a56aef-c4c0206a-706bddcd-d78792ce] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.NonExistentQueue 2026-02-19T08:20:12.456436Z node 7 :SQS INFO: http_req.cpp:1237: Request [60a56aef-c4c0206a-706bddcd-d78792ce] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [f814:68a5:497c:0:e014:68a5:497c:0] Duration [29ms] Code [400] Response [The specified queue doesn't exist (consumer: "ydb-sqs-consumer-1")] 2026-02-19T08:20:12.456458Z node 7 :HTTP_PROXY INFO: http_req.cpp:1671: http request [GetQueueAttributes] requestId [60a56aef-c4c0206a-706bddcd-d78792ce] reply with status: STATUS_UNDEFINED message: The specified queue doesn't exist (consumer: "ydb-sqs-consumer-1") 2026-02-19T08:20:12.456712Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:52114) <- (400 AWS.SimpleQueueService.NonExistentQueue, 133 bytes) 2026-02-19T08:20:12.456765Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:52114) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.GetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "AttributeNames": [ "All" ], "QueueUrl":"/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-1" } 2026-02-19T08:20:12.456794Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:52114) Response: HTTP/1.1 400 AWS.SimpleQueueService.NonExistentQueue Connection: close x-amzn-requestid: 60a56aef-c4c0206a-706bddcd-d78792ce Content-Type: application/x-amz-json-1.1 Content-Length: 133 2026-02-19T08:20:12.456882Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:52114) connection closed Http output full {"__type":"AWS.SimpleQueueService.NonExistentQueue","message":"The specified queue doesn't exist (consumer: \"ydb-sqs-consumer-1\")"} 2026-02-19T08:20:12.486205Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:20:12.486236Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:12.486247Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:20:12.486266Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:12.486276Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:20:12.544115Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7608488234334285653:2465]: Pool not found 2026-02-19T08:20:12.544861Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-02-19T08:20:12.589825Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:20:12.589862Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:12.589875Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:20:12.589895Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:12.589906Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist |91.9%| [LD] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |91.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2026-02-19T08:20:08.871830Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488217577548532:2177];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:08.871910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:09.410576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:09.410678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:09.466578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:20:09.498818Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:09.506320Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488217577548382:2081] 1771489208772470 != 1771489208772473 2026-02-19T08:20:09.533139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:09.728853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:20:09.892951Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:10.202637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:10.202659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:10.202665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:10.202737Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:10.273236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:10.301926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:20:10.317055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:10.322793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.875003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488217577548532:2177];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:13.875080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpStats::DataQueryMulti [GOOD] >> KqpStats::CreateTableAsStats+IsOlap >> Worker::Basic [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |91.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR cookie 0 ... waiting for blocked registrations (done) 2026-02-19T08:20:16.902610Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 2 ... 
unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> KqpExplain::UpdateOnSecondary+UseSink+UseStreamIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2026-02-19T08:20:04.724525Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488196760491746:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:04.724617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:05.236291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:05.236405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:05.307677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:05.350395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:05.351332Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488196760491627:2081] 1771489204665883 != 1771489204665886 2026-02-19T08:20:05.361029Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:05.532175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:05.723042Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:06.063327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:06.063359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:06.063393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:06.063482Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:06.549274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:06.836589Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:07.081944Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handshake: worker# [1:7608488209645394548:2610] 2026-02-19T08:20:07.082012Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handshake: worker# [1:7608488209645394548:2610] 2026-02-19T08:20:07.083403Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:07.089876Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:07.089967Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Send handshake: worker# [1:7608488209645394548:2610] 2026-02-19T08:20:07.090009Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Create read session: session# [1:7608488209645394553:2490] 2026-02-19T08:20:07.090061Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-02-19T08:20:07.090073Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:173: [Worker][1:7608488209645394548:2610] Handshake with writer: sender# [1:7608488209645394550:2610] 2026-02-19T08:20:07.090085Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-02-19T08:20:07.090096Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7608488209645394548:2610] Handshake with 
reader: sender# [1:7608488209645394549:2610] 2026-02-19T08:20:07.091645Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-02-19T08:20:07.124829Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_5645783951133314329_v1 } } 2026-02-19T08:20:07.194706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:09.730143Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488196760491746:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:09.744478Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:10.238904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488222530296618:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:10.239120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:10.239833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488222530296634:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:10.239894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488222530296635:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:10.239922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488222530296636:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:10.240079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:10.244674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:10.387761Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488222530296642:2692] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:20:10.399672Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488222530296640:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:20:10.399745Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488222530296641:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:20:10.460825Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488222530296690:2724] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:10.504469Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488222530296708:2732] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:12.142918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715671:0, at sc ... pp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:20:16.226855Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-02-19T08:20:16.102000Z WriteTime: 2026-02-19T08:20:16.105000Z MessageGroupId: producer ProducerId: producer }] } } 2026-02-19T08:20:16.226965Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-02-19T08:20:16.102000Z WriteTime: 2026-02-19T08:20:16.105000Z MessageGroupId: producer ProducerId: producer }] } 2026-02-19T08:20:16.227058Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-02-19T08:20:16.102000Z WriteTime: 2026-02-19T08:20:16.105000Z MessageGroupId: producer ProducerId: producer }] } 2026-02-19T08:20:16.227217Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2026-02-19T08:20:16.227378Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7608488248300101170:2610] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:20:16.227414Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-02-19T08:20:16.227485Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: 
[TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7608488248300101170:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-02-19T08:20:16.234797Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7608488248300101170:2610] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:16.234864Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-02-19T08:20:16.234911Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2026-02-19T08:20:16.234972Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-02-19T08:20:16.235021Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-02-19T08:20:16.487909Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-02-19T08:20:16.463000Z WriteTime: 2026-02-19T08:20:16.478000Z MessageGroupId: producer ProducerId: producer }] } } 2026-02-19T08:20:16.487988Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-02-19T08:20:16.463000Z WriteTime: 2026-02-19T08:20:16.478000Z MessageGroupId: producer ProducerId: producer }] } 2026-02-19T08:20:16.488047Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-02-19T08:20:16.463000Z WriteTime: 2026-02-19T08:20:16.478000Z MessageGroupId: producer ProducerId: producer }] } 2026-02-19T08:20:16.488142Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2026-02-19T08:20:16.488229Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7608488248300101170:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-02-19T08:20:16.493083Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7608488248300101170:2610] Handle 
NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:16.493157Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-02-19T08:20:16.493192Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-02-19T08:20:16.493238Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-02-19T08:20:16.493278Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-02-19T08:20:16.718887Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-02-19T08:20:16.705000Z WriteTime: 2026-02-19T08:20:16.706000Z MessageGroupId: producer ProducerId: producer }] } } 2026-02-19T08:20:16.718977Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-02-19T08:20:16.705000Z WriteTime: 2026-02-19T08:20:16.706000Z MessageGroupId: producer ProducerId: producer }] } 2026-02-19T08:20:16.719040Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-02-19T08:20:16.705000Z WriteTime: 2026-02-19T08:20:16.706000Z MessageGroupId: producer ProducerId: producer }] } 2026-02-19T08:20:16.719151Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2026-02-19T08:20:16.719238Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7608488248300101170:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-02-19T08:20:16.720695Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7608488248300101170:2610] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:16.720749Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-02-19T08:20:16.720795Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: 
[LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7608488209645394550:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2026-02-19T08:20:16.720844Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-02-19T08:20:16.720891Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-02-19T08:20:16.863999Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:125: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2026-02-19T08:20:16.864044Z node 1 :REPLICATION_SERVICE INFO: topic_reader.cpp:138: [RemoteTopicReader][/Root/topic][0][1:7608488209645394549:2610] Leave 2026-02-19T08:20:16.864167Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:267: [Worker][1:7608488209645394548:2610] Reader has gone: sender# [1:7608488209645394549:2610]: NKikimr::NReplication::NService::TEvWorker::TEvGone { Status: UNAVAILABLE ErrorDescription: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } 2026-02-19T08:20:16.864239Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7608488248300101351:2610] Handshake: worker# [1:7608488209645394548:2610] 2026-02-19T08:20:16.867529Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7608488248300101351:2610] Create read session: session# [1:7608488248300101352:2490] 2026-02-19T08:20:16.867610Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7608488209645394548:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-02-19T08:20:16.867623Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7608488209645394548:2610] Handshake with reader: sender# [1:7608488248300101351:2610] 2026-02-19T08:20:16.867665Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7608488248300101351:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } >> TContinuousBackupWithRebootsTests::Basic |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService [GOOD] >> TTicketParserTest::AccessServiceAuthenticationOk >> TS3WrapperTests::UploadUnknownPart >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondary+UseSink+UseStreamIndex [GOOD] Test command err: 2026-02-19T08:19:32.741811Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488061468629944:2154];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:32.742257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:33.045817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:33.159960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:33.160054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:33.175499Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:33.176614Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488061468629811:2081] 1771489172697539 != 1771489172697542 2026-02-19T08:19:33.209649Z node 
1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:33.311273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:33.387761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:33.387780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:33.387789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:33.387853Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:33.740323Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:33.879514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:33.886293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:33.944952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:34.116498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:34.326591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:34.413985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.658718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488078648500885:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.658840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.659444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488078648500895:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.659504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.044120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.099353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.137741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.201008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.247179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.296050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.354492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.427891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.545774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488082943469059:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.545852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.546155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488082943469064:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.546186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488082943469065:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.546247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.550029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:37.561015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488082943469068:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:19:37.616794Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488082943469121:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:37.724894Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488061468629944:2154];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:37.724997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"Tables":["EightShard"],"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Ite ... 19T08:20:08.106429Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:08.106521Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:08.113011Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:08.858986Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:08.953922Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:08.962298Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:08.972592Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:09.116673Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:09.435982Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:09.560856Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:12.854113Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608488211340263153:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:12.854190Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:13.159246Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488237110068783:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.159358Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.159660Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488237110068793:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.159694Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.239506Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.292586Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.340557Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.406342Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.473578Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.604114Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.709212Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.817582Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:13.951654Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488237110069670:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.951761Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.952120Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488237110069675:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.952177Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488237110069676:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.952352Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:13.957066Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:13.985990Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608488237110069679:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:20:14.083768Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608488241405037026:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:17.247441Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:17.339261Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:17.399674Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":2,"Operators":[{"Inputs":[],"Path":"\/Root\/SecondaryKeys","Name":"Update","SinkType":"KqpTableSink","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"[{Fk: 100,Key: 100,Value: \"test\"}]","Name":"Iterator"}],"Node Type":"ConstantExpr"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","writes":[{"columns":["Fk","Key","Value"],"type":"MultiUpdate"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"Update","SinkType":"KqpTableSink","Table":"SecondaryKeys"}],"Node Type":"Update"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2026-02-19T08:19:29.677695Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488050304326793:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:29.677774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:29.799194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:30.197680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:30.260223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:30.260351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:30.319774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:30.324869Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:30.474503Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.019160s 2026-02-19T08:19:30.646020Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:30.646483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:30.646491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:30.646501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:30.646584Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:30.710958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:31.059893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:33.908765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488067484197296:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:33.908927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:33.909439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488067484197306:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:33.909496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.314592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:34.645174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164786:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.652538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.655687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164793:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.658085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.659195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164792:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.661987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164827:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.662043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164833:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.662050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164834:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.662121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164835:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.662126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164831:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.662170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.664165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164848:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.665826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.665895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164858:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.665948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164859:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.668879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:34.669540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164900:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.669550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164897:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.669636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164902:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.669729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.675839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164909:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.675934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488071779164913:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:34.675977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... opose, tableStr: /Root/Foo, tableId: , opId: 281474976715694:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000=bsI\000\000\000\200" } SchemeshardId: 72057594046644480 2026-02-19T08:20:14.751394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976715694:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\374:+\213\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000=bsI\000\000\000\200" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000=bsI\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\374:+\213\000\000\000\200" ShardIdx: 5 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000=bsI\000\000\000\200" } SchemeshardId: 72057594046644480 2026-02-19T08:20:14.751428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715694:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:20:14.762878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715694:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-02-19T08:20:14.782591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976715694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:20:14.782884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976715694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:20:14.782928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715694:0 CreateParts -> ConfigureParts 2026-02-19T08:20:14.789582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715694:0, at schemeshard: 72057594046644480 2026-02-19T08:20:14.799013Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7608488243577971524:3531] 2026-02-19T08:20:14.804102Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:7608488243577971527:3528] 2026-02-19T08:20:14.837606Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-02-19T08:20:14.837700Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-02-19T08:20:14.837911Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-02-19T08:20:14.843478Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037892 2026-02-19T08:20:14.843577Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037892, state: WaitScheme 
2026-02-19T08:20:14.843719Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-02-19T08:20:14.844666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715694:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715694 TabletId: 72075186224037891 2026-02-19T08:20:14.848749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715694:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715694 TabletId: 72075186224037892 2026-02-19T08:20:14.848790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715694:0 ConfigureParts -> TransferData 2026-02-19T08:20:14.850340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715694:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:20:14.874694Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2026-02-19T08:20:14.874781Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:20:14.874818Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037892 2026-02-19T08:20:14.874841Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037892 2026-02-19T08:20:14.875122Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-02-19T08:20:14.875253Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-02-19T08:20:14.875320Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:20:14.875370Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037891 2026-02-19T08:20:14.875389Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037891 2026-02-19T08:20:14.875609Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-02-19T08:20:14.879467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715694 TabletId: 72075186224037889 2026-02-19T08:20:14.879811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715694:0 TransferData -> NotifyPartitioningChanged 2026-02-19T08:20:14.881551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:20:14.881827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:20:14.881888Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715694:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:20:14.882591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715694 2026-02-19T08:20:14.882639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715694 2026-02-19T08:20:14.882655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-02-19T08:20:14.889685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-02-19T08:20:14.889766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715694:0 progress is 1/1 2026-02-19T08:20:14.889784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715694:0 progress is 1/1 2026-02-19T08:20:14.889818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715694:0 2026-02-19T08:20:14.891652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715694:0 2026-02-19T08:20:14.973490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:20:14.973654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, next wakeup# 14.999808s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-02-19T08:20:14.973699Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7608488050304327114:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037892 2026-02-19T08:20:14.977275Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7608488050304327114:2202] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037891 2026-02-19T08:20:14.995055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, shardIdx# 72057594046644480:4 in# 21, next wakeup# 14.978418s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-02-19T08:20:14.997979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, shardIdx# 72057594046644480:5 in# 24, next wakeup# 14.975491s, rate# 0, in 
queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:20:15.000066Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037891 CompletedLoansChanged 2026-02-19T08:20:15.003210Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037892 CompletedLoansChanged 2026-02-19T08:20:15.012899Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-02-19T08:20:15.016822Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:20:15.018023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:15.023672Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-02-19T08:20:15.024404Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-02-19T08:20:15.024482Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 2026-02-19T08:20:15.024997Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Table has 3 shards |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2026-02-19T08:20:21.628279Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# BAD07D3A-8BF4-48F8-8EB8-E9B725F8473D, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:10896 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 981C3CC4-0632-4D5F-8404-07C9D894332F amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2026-02-19T08:20:21.666859Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# BAD07D3A-8BF4-48F8-8EB8-E9B725F8473D, response# |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |91.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> 
TTicketParserTest::BulkAuthorizationWithRequiredPermissions |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> LocalTableWriter::SupportedTypes |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::CreateTableAs+Stats >> LocalTableWriter::DecimalKeys >> LocalTableWriter::DataAlongWithHeartbeat >> LocalTableWriter::StringEscaping >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::ExecuteWriteQuery >> LocalTableWriter::ApplyInCorrectOrder >> LocalTableWriter::WaitTxIds |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |92.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> LocalTableWriter::ConsistentWrite >> LocalTableWriter::WriteTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: 2026-02-19T08:15:17.898622Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486965065004634:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:17.898714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:15:18.260810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:15:18.294286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:15:18.294411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:15:18.373515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:15:18.411897Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:15:18.413208Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608486965065004409:2081] 1771488917850308 != 1771488917850311 2026-02-19T08:15:18.527934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:15:18.527959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:15:18.527966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:15:18.528054Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:15:18.558863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:15:18.896768Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:15:19.071429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:15:19.123357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:19.329284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:19.525460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:19.606119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:21.890387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486982244875470:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:21.890516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:21.893006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486982244875480:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:21.893119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.229258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.293421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.357037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.429080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.488467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.567059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.626506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.728981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:22.834905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486986539843649:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.834980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.835283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486986539843654:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.835318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486986539843655:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.835421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:15:22.839556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:15:22.853878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608486986539843658:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:15:22.901402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608486965065004634:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:15:22.901533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:15:22.962041Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608486986539843713:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:15:24.832782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:15:33.219991Z no ... 762515]; 2026-02-19T08:17:18.300103Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:18.336533Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:18.426945Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:18.427047Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:18.427803Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:18.463420Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:18.580362Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:18.586886Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:18.586913Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:18.586923Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:18.587252Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:19.051093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:19.058460Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:17:19.070708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:19.153051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:19.324718Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:19.401173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:19.509459Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.137513Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487502219751021:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.137636Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.138033Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487502219751031:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.138392Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.355900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.389532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.417579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.444512Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.479004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.513640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.546116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.593705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:22.709002Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487502219751903:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.709124Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.709468Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487502219751908:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.709513Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487502219751909:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.709635Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:22.716321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:22.745880Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487502219751912:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:17:22.813337Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608487502219751963:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:17:23.304066Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608487485039879998:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:23.304114Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:24.922909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:33.417525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:17:33.417561Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded finished with status: SUCCESS >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpected |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |92.0%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |92.0%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk |92.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] Test command err: 2026-02-19T08:19:48.907830Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488130052076134:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:48.909254Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:49.197759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:49.229220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:49.229313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:49.296361Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488130052076100:2081] 1771489188904103 != 1771489188904106 2026-02-19T08:19:49.310248Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:49.312933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:49.486507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:49.536551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:49.536572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:49.536579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:49.536678Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:49.920356Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:50.050102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:19:50.061527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:52.521459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488147231946180:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.533916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.534439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488147231946190:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.534522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.937848Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488147231946203:2505] txid# 281474976710658, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0PK\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } 2026-02-19T08:19:52.966917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488147231946214:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.966987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.967572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488147231946217:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.967639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.986710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.138303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488151526913605:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.138381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.138812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488151526913608:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.138874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.163513Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488151526913619:2577] txid# 281474976710660, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } 2026-02-19T08:19:54.640366Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:54.640467Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:54.716907Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488153553684398:2081] 1771489194515320 != 1771489194515323 2026-02-19T08:19:54.726101Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:54.727700Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:54.727763Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:54.731713Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:54.877097Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:54.877128Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:54.877135Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:54.877205Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:54.893038Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:55.327957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:55.573341Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:59.183129Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488175028521773:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.183240Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.186008Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488175028521783:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.186099Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.199933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.267122Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp ... log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.189576Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.190578Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.191172Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.191511Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.192216Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { 
SchemeNeedActualization: false } ; 2026-02-19T08:20:24.192462Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.193175Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.193475Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.194713Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037995;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.194936Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.195636Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.195960Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: 
false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.196626Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.196864Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.197485Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.197702Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.198590Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.198682Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.199479Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" 
TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.199554Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:20:24.205892Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037960;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups [GOOD] >> TBackupCollectionTests::DropCollectionAfterIncrementalRestore |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> LocalTableWriter::SupportedTypes [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> LocalTableWriter::DecimalKeys [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpStats::MultiTxStatsFullScan |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_replication/ydb-core-tx-replication-controller-ut_replication |92.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_replication/ydb-core-tx-replication-controller-ut_replication >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] >> LocalTableWriter::WaitTxIds [GOOD] |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_replication/ydb-core-tx-replication-controller-ut_replication |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 2487, msgbus: 8844 2026-02-19T08:14:12.506303Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486685836329784:2161];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:12.506427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:12.751884Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:12.795706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:12.795809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:12.816434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:12.892592Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:13.044089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:13.070790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:13.070808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:13.070813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:13.070887Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:13.402544Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486685836329908:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:13.402618Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486690131297977:2731] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:13.403086Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486690131297977:2731] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:13.434771Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486690131297977:2731] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:14:13.441671Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486690131297977:2731] Handle TEvDescribeSchemeResult Forward to# [1:7608486690131297976:2730] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:13.449988Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486685836329908:2143] Handle TEvProposeTransaction 2026-02-19T08:14:13.450022Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486685836329908:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:14:13.450139Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486685836329908:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608486690131297983:2737] 2026-02-19T08:14:13.522003Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:14:13.616669Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486690131297983:2737] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:13.616988Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486690131297983:2737] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-02-19T08:14:13.617011Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486690131297983:2737] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:13.617111Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486690131297983:2737] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:13.617451Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486690131297983:2737] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:13.617568Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486690131297983:2737] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:14:13.617676Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486690131297983:2737] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:14:13.617826Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486690131297983:2737] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:14:13.618619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:13.623542Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486690131297983:2737] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:14:13.623620Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486690131297983:2737] txid# 281474976710657 SEND to# [1:7608486690131297982:2736] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:14:13.647475Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486685836329908:2143] Handle TEvProposeTransaction 2026-02-19T08:14:13.647505Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486685836329908:2143] TxId# 281474976710658 ProcessProposeTransaction 2026-02-19T08:14:13.647537Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486685836329908:2143] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7608486690131298031:2772] 2026-02-19T08:14:13.649349Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486690131298031:2772] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:13.649409Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486690131298031:2772] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-02-19T08:14:13.649431Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486690131298031:2772] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:13.649505Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486690131298031:2772] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:13.651030Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486690131298031:2772] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:13.651183Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486690131298031:2772] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:14:13.651276Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486690131298031:2772] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-02-19T08:14:13.651456Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486690131298031:2772] txid# 281474976710658 HANDLE EvClientConnected 2026-02-19T08:14:13.652065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:14:13.655909Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486690131298031:2772] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-02-19T08:14:13.655981Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486690131298031:2772] txid# 281474976710658 SEND to# [1:7608486690131298030:2771] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-02-19T08:14:16.101533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608486703016200060:2334], DatabaseId: /dc-1, PoolId: default, Failed to ... ESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 27 Version: 4 PathOwnerId: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.363790Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 27 Version: 4 PathOwnerId: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.363801Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976715665 2026-02-19T08:20:23.363814Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186224037891, txId: 281474976715665, pathId: [OwnerId: 72075186224037891, LocalPathId: 27], version: 4 2026-02-19T08:20:23.363827Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 27] was 1 2026-02-19T08:20:23.363891Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976715665, subscribers: 0 2026-02-19T08:20:23.364071Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.364171Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.364209Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.364242Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.364275Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375174Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375282Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375307Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375341Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375362Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375401Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375431Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375467Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375492Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375516Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375537Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375559Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375578Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375598Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375618Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375640Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375660Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375681Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375703Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375722Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375743Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375764Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375784Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375808Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375831Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.375850Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-02-19T08:20:23.929909Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [59:7608488221689283531:2143] Handle TEvNavigate describe path /dc-1/tenant-db 2026-02-19T08:20:23.930052Z node 59 :TX_PROXY DEBUG: describe.cpp:270: Actor# [59:7608488281818827488:3428] HANDLE EvNavigateScheme /dc-1/tenant-db 2026-02-19T08:20:23.930673Z node 59 :TX_PROXY DEBUG: describe.cpp:354: Actor# [59:7608488281818827488:3428] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:23.930893Z node 59 :TX_PROXY DEBUG: describe.cpp:433: Actor# [59:7608488281818827488:3428] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2026-02-19T08:20:23.939232Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7608488281818827488:3428] Handle TEvDescribeSchemeResult Forward to# [59:7608488281818827487:3427] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976725657 CreateStep: 1771489212550 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 38 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 38 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER HashesInitParams { HashType: Argon InitParams: "J05F+BxQVUDPV0l233DjMw==" } HashesInitParams { HashType: ScramSha256 InitParams: "4096:Untf+MhnbDjrIpngKROcGQ==" } } Audience: "/dc-1/tenant-db" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186224037891 2026-02-19T08:20:24.010015Z node 59 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2026-02-19T08:20:24.010669Z node 59 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:20:24.018350Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:24.469142Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:20:24.469185Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> LocalTableWriter::StringEscaping [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2026-02-19T08:20:24.594123Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488284194839640:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:24.594189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:25.040430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:25.040524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-02-19T08:20:25.153428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:25.213588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:25.302007Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488284194839413:2081] 1771489224546597 != 1771489224546600 2026-02-19T08:20:25.319480Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:25.393798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:25.589890Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:25.981574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:25.981602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:25.981609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:25.981726Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:26.338166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:26.351074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:26.563137Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Handshake: worker# [1:7608488292784774847:2492] 2026-02-19T08:20:26.563427Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:26.563899Z node 1 :REPLICATION_SERVICE 
DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:26.563947Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Send handshake: worker# [1:7608488292784774847:2492] 2026-02-19T08:20:26.564792Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: 
ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:26.565565Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 
2026-02-19T08:20:26.565902Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488292784774940:2552] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:20:26.565944Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:26.566214Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488292784774940:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 
2026-02-19T08:20:26.584663Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488292784774940:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:26.584710Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:26.584760Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488292784774937:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/ut/libs-storage-partition_direct-ut >> LocalTableWriter::WriteTable [GOOD] >> LocalTableWriter::ConsistentWrite [GOOD] |92.0%| [LD] {RESULT} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/ut/libs-storage-partition_direct-ut |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/nbs/cloud/blockstore/libs/storage/partition_direct/ut/libs-storage-partition_direct-ut >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2026-02-19T08:20:24.954781Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488282430292118:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:24.954992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:25.593842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:25.634341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:25.634460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:25.813870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488282430291902:2081] 1771489224899802 != 1771489224899805 2026-02-19T08:20:25.845200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:25.845329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:25.846574Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:25.942030Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:26.342062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:26.342272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:26.342280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:26.342287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:26.342432Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:26.842637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:26.853333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:27.117228Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Handshake: worker# [1:7608488291020227338:2493] 2026-02-19T08:20:27.117538Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:27.117815Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:27.117855Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Send handshake: worker# [1:7608488291020227338:2493] 2026-02-19T08:20:27.122008Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:27.122232Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2026-02-19T08:20:27.122447Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488295315194729:2553] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:20:27.122482Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.122577Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488295315194729:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2026-02-19T08:20:27.126925Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488295315194729:2553] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:27.126984Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.127026Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488295315194725:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> LocalTableWriter::ApplyInCorrectOrder [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] 
Test command err: 2026-02-19T08:20:25.625661Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488289076020250:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:25.625724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:25.709938Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.016038s 2026-02-19T08:20:26.111846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:26.111928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:26.126403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:26.222292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:26.274988Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:26.293873Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488289076020022:2081] 1771489225606141 != 1771489225606144 2026-02-19T08:20:26.397795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:26.617996Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:26.806401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:26.806428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:26.806437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:26.806510Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:27.204710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:27.211349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:27.214834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:27.391971Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handshake: worker# [1:7608488297665955543:2554] 2026-02-19T08:20:27.392279Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:27.392510Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:27.392550Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Send handshake: worker# [1:7608488297665955543:2554] 2026-02-19T08:20:27.392901Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:27.398037Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-02-19T08:20:27.398211Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-02-19T08:20:27.398378Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488297665955546:2553] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:20:27.398453Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.398547Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488297665955546:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-02-19T08:20:27.400350Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488297665955546:2553] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:27.400409Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.400446Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-02-19T08:20:28.399205Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2026-02-19T08:20:28.399377Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2026-02-19T08:20:28.399513Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488297665955546:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-02-19T08:20:28.403267Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488297665955546:2553] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:28.403348Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.403404Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488297665955542:2553] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2026-02-19T08:20:25.030207Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488285246820427:2260];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:25.030260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:25.704158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:25.747072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:25.747172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:25.905069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:25.915937Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:25.921124Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488285246820196:2081] 1771489224969056 != 1771489224969059 2026-02-19T08:20:25.961845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:25.965667Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:26.013821Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.008513s 2026-02-19T08:20:26.529234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:26.529255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:26.529262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:26.529331Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:27.175214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:27.193110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:27.417572Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handshake: worker# [1:7608488298131723016:2552] 2026-02-19T08:20:27.417873Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: 
[LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:27.418125Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:27.418163Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Send handshake: worker# [1:7608488298131723016:2552] 2026-02-19T08:20:27.418639Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:27.422968Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-02-19T08:20:27.429288Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-02-19T08:20:27.429458Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298131723019:2551] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:20:27.429495Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.429568Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298131723019:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-02-19T08:20:27.434361Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298131723019:2551] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:27.434419Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.434458Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298131723015:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |92.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::StringEscaping [GOOD] Test command err: 2026-02-19T08:20:25.182978Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488289490453478:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:25.183056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:25.681805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:25.681964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:25.758599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:25.787836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:25.836487Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:25.843199Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488289490453446:2081] 1771489225176851 != 1771489225176854 2026-02-19T08:20:25.960293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:26.221098Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:26.366310Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:26.366335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:26.366343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:26.366439Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:26.870253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:26.890429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:20:26.894472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:27.101791Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Handshake: worker# [1:7608488293785421587:2492] 2026-02-19T08:20:27.102136Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:27.102424Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:27.102463Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Send handshake: 
worker# [1:7608488293785421587:2492] 2026-02-19T08:20:27.104006Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:27.104199Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-02-19T08:20:27.104337Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298080388976:2551] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:20:27.104398Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.104452Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298080388976:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-02-19T08:20:27.110827Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298080388976:2551] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:27.110934Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.110980Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298080388972:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2026-02-19T08:20:26.408535Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488291814159347:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:26.408603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:26.889789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:26.985801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:26.985960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:27.090133Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488291814159086:2081] 1771489226357938 != 1771489226357941 2026-02-19T08:20:27.104154Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:27.115615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:27.245917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:27.381996Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:27.774030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:27.774049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:27.774056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:27.774136Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:28.201248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:28.215328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:20:28.221288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:28.414368Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handshake: worker# [1:7608488300404094525:2493] 2026-02-19T08:20:28.414754Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:28.415037Z node 1 :REPLICATION_SERVICE 
DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:28.415075Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Send handshake: worker# [1:7608488300404094525:2493] 2026-02-19T08:20:28.418210Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:28.436749Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-02-19T08:20:28.437009Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2026-02-19T08:20:28.437223Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488300404094621:2555] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:20:28.437281Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.437389Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488300404094621:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-02-19T08:20:28.440278Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: 
[TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488300404094621:2555] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:28.440343Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.440388Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2026-02-19T08:20:28.440682Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:28.441118Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:28.441554Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2026-02-19T08:20:28.441649Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2026-02-19T08:20:28.441839Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488300404094621:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-02-19T08:20:28.444422Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488300404094621:2555] Handle 
NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:28.444485Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.444516Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2026-02-19T08:20:28.444980Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:28.445300Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2026-02-19T08:20:28.445428Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488300404094621:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-02-19T08:20:28.446889Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488300404094621:2555] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:28.446929Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.446977Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2026-02-19T08:20:28.447427Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488300404094618:2555] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } >> TPQCDTest::TestUnavailableWithoutNetClassifier >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationUnavailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 
2026-02-19T08:20:26.419284Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488294118072930:2267];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:26.419425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:26.468900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:26.901791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:26.933259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:26.933351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:27.021815Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488294118072689:2081] 1771489226361149 != 1771489226361152 2026-02-19T08:20:27.028078Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:27.037122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:27.063415Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:27.409013Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:27.441319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:27.441342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:27.441350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:27.441438Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:27.769170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:27.786083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:27.789323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:27.995376Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Handshake: worker# [1:7608488298413040822:2487] 2026-02-19T08:20:27.995699Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:27.995926Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:27.995984Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Send handshake: worker# [1:7608488298413040822:2487] 2026-02-19T08:20:27.997188Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 35b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 23b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:27.997729Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 35 },{ Order: 3 BodySize: 23 }] } 2026-02-19T08:20:27.997894Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298413040915:2547] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 
2026-02-19T08:20:27.997944Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:27.999016Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298413040915:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 35b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 23b }] } 2026-02-19T08:20:28.001603Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488298413040915:2547] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:28.001656Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.001746Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488298413040912:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> KqpExplain::CreateTableAs+Stats [GOOD] >> KqpExplain::CreateTableAs-Stats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 27045, msgbus: 18723 2026-02-19T08:13:55.670076Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486611777057465:2183];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:13:55.670592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:13:55.989051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:13:56.036118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:13:56.036220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:13:56.066684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:13:56.080322Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:13:56.283258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:13:56.298668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:13:56.298692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:13:56.298699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:13:56.298889Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:13:56.602709Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486611777057567:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:13:56.602800Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486616072025650:2740] HANDLE EvNavigateScheme dc-1 2026-02-19T08:13:56.603228Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486616072025650:2740] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:56.638024Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486616072025650:2740] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:13:56.642715Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486616072025650:2740] Handle TEvDescribeSchemeResult Forward to# [1:7608486616072025649:2739] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:13:56.654616Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486611777057567:2143] Handle TEvProposeTransaction 2026-02-19T08:13:56.654642Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486611777057567:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:13:56.654720Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486611777057567:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# 
[1:7608486616072025655:2745] 2026-02-19T08:13:56.676990Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:13:56.784617Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486616072025655:2745] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:13:56.784701Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486616072025655:2745] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-02-19T08:13:56.784721Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486616072025655:2745] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:13:56.784782Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486616072025655:2745] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:13:56.785258Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486616072025655:2745] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:56.785576Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486616072025655:2745] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:13:56.785686Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486616072025655:2745] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:13:56.785901Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486616072025655:2745] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:13:56.786626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:13:56.791050Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486616072025655:2745] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:13:56.791128Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486616072025655:2745] txid# 281474976710657 SEND to# [1:7608486616072025654:2744] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:13:56.815461Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486611777057567:2143] Handle TEvProposeTransaction 2026-02-19T08:13:56.815516Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486611777057567:2143] TxId# 281474976710658 ProcessProposeTransaction 2026-02-19T08:13:56.815562Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# 
[1:7608486611777057567:2143] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7608486616072025700:2780] 2026-02-19T08:13:56.817221Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486616072025700:2780] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:13:56.817290Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486616072025700:2780] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-02-19T08:13:56.817305Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486616072025700:2780] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:13:56.817358Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486616072025700:2780] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:13:56.817807Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486616072025700:2780] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:13:56.817958Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486616072025700:2780] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:13:56.818036Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486616072025700:2780] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-02-19T08:13:56.818203Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486616072025700:2780] txid# 281474976710658 HANDLE EvClientConnected 2026-02-19T08:13:56.818766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:13:56.821699Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486616072025700:2780] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-02-19T08:13:56.821770Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486616072025700:2780] txid# 281474976710658 SEND to# [1:7608486616072025699:2779] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-02-19T08:13:56.866657Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486611777057567:2143] Handle TEvProposeTransaction 2026-02-19T08:13:56.866692Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:23 ... 
fQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:39716" 2026-02-19T08:20:18.552798Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488260119184243:3093] txid# 281474976715665 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:20:18.552823Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488260119184243:3093] txid# 281474976715665 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:20:18.553033Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488260119184243:3093] txid# 281474976715665 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:20:18.553091Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488260119184243:3093] txid# 281474976715665 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:20:18.553147Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488260119184243:3093] txid# 281474976715665 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:18.553459Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488260119184243:3093] txid# 281474976715665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:18.553491Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488260119184243:3093] txid# 281474976715665, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:20:18.553595Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488260119184243:3093] txid# 281474976715665, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:20:18.553630Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488260119184243:3093] txid# 281474976715665 SEND to# [59:7608488260119184242:2356] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:18.554132Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=NjRmYTBjN2QtNTk5ODA4NDEtMjAwYzlkNWUtYWEzZTYxY2U=, ActorId: [59:7608488255824216890:2356], ActorState: ExecuteState, LegacyTraceId: 01khtfrkyjc1zsphk3tt7p2qpg, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:20:18.556120Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488217169509820:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:18.556144Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488217169509820:2141] TxId# 281474976715666 ProcessProposeKqpTransaction 2026-02-19T08:20:22.314606Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488217169509820:2141] Handle TEvProposeTransaction 2026-02-19T08:20:22.314643Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488217169509820:2141] TxId# 281474976715667 ProcessProposeTransaction 2026-02-19T08:20:22.314701Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488217169509820:2141] Cookie# 0 userReqId# "" txid# 281474976715667 SEND to# [59:7608488277299053492:3121] 2026-02-19T08:20:22.316956Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488277299053492:3121] txid# 281474976715667 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjQxNiwiaWF0IjoxNzcxNDg5MjE2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.aBDlkSpT5rYzU17DYNl6CUHnj5V06dvcMjSI_Yuy8LhnR7wO1sY99dvNCh9iHB3CvHpDl8-xXuonlW8a5RrQhfHLU2nq3JN_XtIzWk1ORM1cKAYkYmRk2AvPYmmIlHQvpfWvElRroepuhqKokG-B3fi9CniBx_BwJx89eLnJfF1ajSbyVHIRpbicmrxnQYURDdPX7UejHaZd-ZrgNChXTV48CSxULQ-USVtVCjnDJQlx3UKkG6wGXSzfJWtTiQaFF20dYa2P0PfTQyG4qjaoROCX1yzdWeoCvv8UyldiftfgCctAb4a7NCjKwYBiUXqxTY7TojIKI0mEZRfsZbDNaA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjQxNiwiaWF0IjoxNzcxNDg5MjE2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46516" 2026-02-19T08:20:22.317039Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488277299053492:3121] txid# 281474976715667 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:20:22.317062Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488277299053492:3121] txid# 281474976715667 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:20:22.317250Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488277299053492:3121] txid# 281474976715667 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:20:22.317310Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488277299053492:3121] txid# 281474976715667 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:20:22.317364Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488277299053492:3121] txid# 281474976715667 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:22.317656Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488277299053492:3121] txid# 281474976715667 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:22.317685Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488277299053492:3121] txid# 281474976715667, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:20:22.321937Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# 
[59:7608488277299053492:3121] txid# 281474976715667, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:20:22.321995Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488277299053492:3121] txid# 281474976715667 SEND to# [59:7608488277299053491:2356] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:22.322728Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=NjRmYTBjN2QtNTk5ODA4NDEtMjAwYzlkNWUtYWEzZTYxY2U=, ActorId: [59:7608488255824216890:2356], ActorState: ExecuteState, LegacyTraceId: 01khtfrqm60tj8wjtmcepr2nxj, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:20:22.323134Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488217169509820:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:22.323160Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488217169509820:2141] TxId# 281474976715668 ProcessProposeKqpTransaction 2026-02-19T08:20:23.566813Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:20:23.566837Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:29.482263Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488217169509820:2141] Handle TEvProposeTransaction 2026-02-19T08:20:29.482298Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488217169509820:2141] TxId# 281474976715669 ProcessProposeTransaction 2026-02-19T08:20:29.482348Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488217169509820:2141] Cookie# 0 userReqId# "" txid# 281474976715669 SEND to# [59:7608488307363824682:3174] 2026-02-19T08:20:29.484573Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488307363824682:3174] txid# 281474976715669 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjQxNiwiaWF0IjoxNzcxNDg5MjE2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.aBDlkSpT5rYzU17DYNl6CUHnj5V06dvcMjSI_Yuy8LhnR7wO1sY99dvNCh9iHB3CvHpDl8-xXuonlW8a5RrQhfHLU2nq3JN_XtIzWk1ORM1cKAYkYmRk2AvPYmmIlHQvpfWvElRroepuhqKokG-B3fi9CniBx_BwJx89eLnJfF1ajSbyVHIRpbicmrxnQYURDdPX7UejHaZd-ZrgNChXTV48CSxULQ-USVtVCjnDJQlx3UKkG6wGXSzfJWtTiQaFF20dYa2P0PfTQyG4qjaoROCX1yzdWeoCvv8UyldiftfgCctAb4a7NCjKwYBiUXqxTY7TojIKI0mEZRfsZbDNaA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjQxNiwiaWF0IjoxNzcxNDg5MjE2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:46516" 2026-02-19T08:20:29.484651Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488307363824682:3174] txid# 281474976715669 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:20:29.484672Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488307363824682:3174] txid# 281474976715669 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:20:29.484825Z node 59 
:TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488307363824682:3174] txid# 281474976715669 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:20:29.484880Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488307363824682:3174] txid# 281474976715669 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:20:29.484932Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488307363824682:3174] txid# 281474976715669 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:29.485210Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488307363824682:3174] txid# 281474976715669 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:29.485237Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488307363824682:3174] txid# 281474976715669, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:20:29.485344Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488307363824682:3174] txid# 281474976715669, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:20:29.485375Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488307363824682:3174] txid# 281474976715669 SEND to# [59:7608488307363824681:2356] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:29.486438Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=NjRmYTBjN2QtNTk5ODA4NDEtMjAwYzlkNWUtYWEzZTYxY2U=, ActorId: [59:7608488255824216890:2356], ActorState: ExecuteState, LegacyTraceId: 01khtfrymacf0nz7bx311r939s, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:20:29.486741Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488217169509820:2141] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:29.486761Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488217169509820:2141] TxId# 281474976715670 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2026-02-19T08:20:25.671819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488286908856633:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:25.674551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:26.244597Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:26.317187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:26.317322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:26.506923Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488286908856398:2081] 1771489225614489 != 1771489225614492 2026-02-19T08:20:26.516457Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:26.519195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:26.520277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:26.689860Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:27.151589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:27.151611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:27.151620Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:27.151727Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:27.896459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:27.906646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:27.910082Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:28.129785Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handshake: worker# [1:7608488295498791834:2491] 2026-02-19T08:20:28.130096Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:20:28.130310Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:20:28.130346Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Send handshake: worker# [1:7608488295498791834:2491] 2026-02-19T08:20:28.137863Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:28.145208Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-02-19T08:20:28.145358Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-02-19T08:20:28.145572Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: 
[TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488299793759226:2552] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:20:28.146004Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.146115Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488299793759226:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-02-19T08:20:28.158043Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488299793759226:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:28.158130Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.158186Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-02-19T08:20:28.161969Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-02-19T08:20:28.166198Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2026-02-19T08:20:28.166344Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2026-02-19T08:20:28.166450Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488299793759226:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-02-19T08:20:28.170877Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7608488299793759226:2552] Handle 
NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-02-19T08:20:28.170955Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:20:28.171008Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7608488299793759223:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } >> TPQCDTest::TestUnavailableWithoutClustersList |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TBackupCollectionTests::DropCollectionAfterIncrementalRestore [GOOD] >> TBackupCollectionTests::BackupRestoreCoveringIndex |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [GOOD] >> Replication::ResourceId [GOOD] |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest >> TestSqsTopicHttpProxy::TestPurgeQueue >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest >> Replication::ResourceId [GOOD] |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest |92.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_replication/unittest >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> KqpStats::CreateTableAsStats+IsOlap [GOOD] >> KqpStats::CreateTableAsStats-IsOlap 
|92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TA] $(B)/ydb/core/tx/replication/controller/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationRetryError >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: 2026-02-19T08:19:33.944565Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488066318873871:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:33.952406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:34.041227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:34.376004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:34.376133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:34.377937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488066318873822:2081] 1771489173929904 != 1771489173929907 2026-02-19T08:19:34.389109Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:34.394570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:34.394939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:34.552826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:34.552854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:34.552862Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:34.552953Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:34.581848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:34.952015Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:35.061275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:35.075084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:35.143296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:35.381816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:35.580413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:35.674914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:37.669864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488083498744890:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.669990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.670569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488083498744900:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:37.670630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:38.063731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:38.118858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:38.183436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:38.247069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:38.295204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:38.349985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:38.416736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:38.513768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:38.665645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488087793713067:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:38.665765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:38.666388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488087793713072:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:38.666448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488087793713073:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:38.666581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:38.671750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:38.699505Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488087793713076:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:19:38.775762Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488087793713127:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:38.946107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488066318873871:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:38.946205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:29.541536Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:29.595781Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:29.649284Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:29.709128Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:29.832410Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:29.890619Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:29.977772Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:30.072317Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488309137040464:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.072398Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.072579Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488309137040469:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.072615Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488309137040470:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.072654Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.076644Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:30.092604Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488309137040473:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:20:30.179193Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488309137040524:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:33.085621Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:34.648951Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608488326316910258:2617], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2026-02-19T08:20:34.649523Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=M2VlMGZkYzgtMTY4ZTcxNzAtOGZlZjg1Yi02ZjQ3NjMxYg==, ActorId: [5:7608488326316910256:2616], ActorState: ExecuteState, LegacyTraceId: 01khtfs3p3b1t5ppw7pp7nzm4z, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 17 } message: "At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda" end_position { row: 4 column: 17 } severity: 1 issues { position { row: 3 column: 25 } message: "At function: Parameter, At function: DataType" end_position { row: 3 column: 25 } severity: 1 issues { position { row: 3 column: 25 } message: "Invalid decimal precision: 99" end_position { row: 3 column: 25 } severity: 1 } } } } tx_id# trace_id# 2026-02-19T08:20:34.787448Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=5&id=YjExMGFmZmQtZGNmODc5MmUtNzIwODc4LWQzZmRlNTJk, ActorId: [5:7608488326316910262:2619], ActorState: ExecuteState, LegacyTraceId: 01khtfs3q8393a6kj6ntn6b8zy, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1671: ydb/core/kqp/query_data/kqp_query_data.cpp:263: Parameter $value22 type mismatch, expected: Type (Data), schemeType: Decimal(22,9), schemeTypeId: 4865 , actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 status# BAD_REQUEST issues# trace_id# 2026-02-19T08:20:34.847913Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608488326316910280:2625], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:20:34.850454Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=MzA0NDM4MmItNGI1ZDA3MjAtNmZhODY4M2UtNTVmNWE1Mjk=, ActorId: [5:7608488326316910278:2624], ActorState: ExecuteState, LegacyTraceId: 01khtfs3vxb86s87peskxza7e7, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 7 column: 29 } message: "At function: KiWriteTable!" end_position { row: 7 column: 29 } severity: 1 issues { position { row: 7 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 7 column: 50 } severity: 1 issues { position { row: 4 column: 25 } message: "Implicit decimal cast would lose precision" end_position { row: 4 column: 25 } severity: 1 } issues { position { row: 7 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 7 column: 50 } severity: 1 } } issues { position { row: 7 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 7 column: 50 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:20:34.890694Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608488326316910292:2630], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:20:34.893593Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=MjY0YzRlZi1hZGI3ODU1YS1hODE5MjM3Ny0zODBiOTVlZQ==, ActorId: [5:7608488326316910289:2629], ActorState: ExecuteState, LegacyTraceId: 01khtfs3xh28gar9sq91q1gvpr, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 29 } message: "At function: KiWriteTable!" end_position { row: 3 column: 29 } severity: 1 issues { position { row: 3 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 3 column: 50 } severity: 1 issues { position { column: 14 } message: "Implicit decimal cast would lose precision" end_position { column: 14 } severity: 1 } issues { position { row: 3 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 3 column: 50 } severity: 1 } } issues { position { row: 3 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 3 column: 50 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2026-02-19T08:20:33.447718Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488323291795273:2154];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:33.455179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:33.775630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:33.775723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:33.824119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:33.865781Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488323291795138:2081] 1771489233437483 != 1771489233437486 2026-02-19T08:20:33.870225Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:33.881183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:34.003617Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:34.023805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/00320c/r3tmp/yandexdjUAzb.tmp 2026-02-19T08:20:34.023833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/00320c/r3tmp/yandexdjUAzb.tmp 2026-02-19T08:20:34.024023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/00320c/r3tmp/yandexdjUAzb.tmp 2026-02-19T08:20:34.024135Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:34.459645Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:36.943729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488336176698011:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.943729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488336176697996:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.943850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.945865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488336176698014:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.945939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.952889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:36.968729Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488336176698016:2384], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-02-19T08:20:37.071159Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488340471665364:2568] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink >> TBackupCollectionTests::BackupRestoreCoveringIndex [GOOD] >> TBackupCollectionTests::AlterTableInBackupCollectionProtection |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> KqpStats::MultiTxStatsFullScan [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpExplain::CreateTableAs-Stats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: 2026-02-19T08:19:52.494080Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488146525980267:2213];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:52.494164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:52.853788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:52.882569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:52.882684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:52.949492Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:52.953694Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488146525980074:2081] 1771489192470693 != 1771489192470696 2026-02-19T08:19:52.979145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:53.105215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:53.282422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-02-19T08:19:53.282443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:53.282450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:53.282533Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:53.502433Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:54.240039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:54.248670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:54.344665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:54.544888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:54.760317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:54.858017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:57.494543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488146525980267:2213];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:57.495300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:57.579061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488168000818436:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:57.579201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:57.579759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488168000818446:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:57.579828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.269878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.375958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.441818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.516643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.603413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.683117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.781276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.934791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.100350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488176590753923:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.100440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.101264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488176590753928:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.101303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488176590753929:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.101442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.108516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:59.136668Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488176590753932:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:19:59.196594Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488176590753983:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:01.275870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 2814749767106 ... 8Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:25.966360Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:25.966386Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:25.966395Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:25.966498Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:26.501506Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:26.791592Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:26.798963Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:26.811308Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:20:26.869207Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:20:30.008881Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488311216335106:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.008952Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488311216335140:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.009016Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.014257Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:30.035183Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608488311216335143:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-02-19T08:20:30.102797Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608488311216335205:2621] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:30.132712Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:30.427202Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:30.490081Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608488289741497768:2139];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:30.490202Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:30.722532Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:30.734942Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:32.229929Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:32.230046Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:32.353757Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:32.353849Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:32.369941Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608488318753126534:2081] 1771489232152259 != 1771489232152262 2026-02-19T08:20:32.390356Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:32.406692Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-02-19T08:20:32.512071Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:32.558231Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:32.558261Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:32.558272Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:32.558378Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:33.134626Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:33.198933Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:37.130196Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488340227963884:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:37.130384Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:37.130853Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488340227963919:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:37.136464Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:37.158289Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488340227963921:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:20:37.257653Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488340227963984:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:37.303556Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:37.701822Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608488340227964116:2356], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2026-02-19T08:20:37.702289Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=NjVjOThjZGMtNGVhZmQ0NzgtZjc5OTZkYjgtYmYzNjkyMDk=, ActorId: [5:7608488340227964114:2355], ActorState: ExecuteState, LegacyTraceId: 01khtfs6kx0fvr4bhf5da948fm, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 5 column: 49 } message: "Creating table with data is not supported." end_position { row: 5 column: 49 } severity: 1 } } tx_id# trace_id# >> Secret::Validation >> Secret::DeactivatedQueryService >> KqpParams::CheckQueryLimitsWorksAsExpected [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable |92.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |92.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |92.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink >> Secret::Simple |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::MultiTxStatsFullScan [GOOD] Test command err: 2026-02-19T08:19:46.845063Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488119977792159:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:46.845161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:46.877444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:47.237764Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:47.308192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:47.315754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:47.394858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:47.430364Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:47.435458Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488119977792043:2081] 1771489186834172 != 1771489186834175 2026-02-19T08:19:47.715401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:47.762402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:47.762430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:47.762437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:47.762515Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:47.854035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:48.853748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:48.872549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:48.938652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:49.164591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:49.459223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:49.584278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.845174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488119977792159:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:51.845272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:52.215659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488145747597698:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.215775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.216267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488145747597708:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.216313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.960734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.001913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.042739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.083082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.122162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.171612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.271430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.366423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.524211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488150042565879:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.524304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.524651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488150042565884:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.524686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488150042565885:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.524785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.530552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:53.554689Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488150042565888:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:19:53.655708Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488150042565939:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges ... -02-19T08:20:35.585375Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:35.636219Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:35.683868Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:35.729005Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:35.798768Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:35.846166Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:35.908656Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:36.021575Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:36.194060Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7608488335285623724:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.194190Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.198103Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488335285623729:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.198184Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488335285623730:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.198339Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:36.203743Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:36.225899Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488335285623733:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:20:36.305644Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488335285623784:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:39.065901Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489239058, txId: 281474976710673] shutting down {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort","Stats":{"WaitOutputTimeUs":{"Count":1,"Max":1547,"Sum":1547,"History":[5,1547],"Min":1547},"UseLlvm":"undefined","Table":[{"Path":"\/Root\/EightShard","ReadBytes":{"Count":1,"Max":54,"Sum":54,"Min":54},"ReadRows":{"Count":1,"Max":3,"Sum":3,"Min":3}}],"OutputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":0,"FinishedTasks":1,"Introspections":["1 tasks for a single\/sequential source scan"],"IngressRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"DurationUs":{"Count":1,"Max":1000,"Sum":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[1,1048576,5,1048576],"Min":1048576},"BaseTimeMs":1771489239025,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[5,37],"Min":37},"FirstMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4}},"LocalBytes":37,"Name":"4","Push":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[5,37],"Min":37},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3}}}],"CpuTimeUs":{"Count":1,"Max":1408,"Sum":1408,"History":[1,1151,5,1408],"Min":1408},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[5,144],"Min":144},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3}},"External":{},"Ingress":{},"Name":"KqpReadRangesSource","Push":{"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[5,144],"Min":144},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":1736,"Sum":1736,"History":[5,1736],"Min":1736},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":1}}}],"StageDurationUs":1000,"OutputBytes":{"Count":1,"Max":37,"Sum":37,"Min":37},"Tasks":1,"UpdateTim
eMs":4}}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Max":37,"Sum":37,"Min":37},"Introspections":["1 minimum tasks for compute"],"DurationUs":{"Count":1,"Max":2000,"Sum":2000,"Min":2000},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[5,1048576,6,1048576],"Min":1048576},"BaseTimeMs":1771489239025,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[6,37],"Min":37},"FirstMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4}},"LocalBytes":37,"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4},"FirstMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[5,37,6,37],"Min":37},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":3243,"Sum":3243,"History":[5,3243,6,3243],"Min":3243},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":4,"Min":1}}}],"CpuTimeUs":{"Count":1,"Max":3901,"Sum":3901,"History":[5,3896,6,3901],"Min":3901},"StageDurationUs":2000,"ResultRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"ResultBytes":{"Count":1,"Max":37,"Sum":37,"Min":37},"OutputBytes":{"Count":1,"Max":37,"Sum":37,"Min":37},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[5,37,6,37],"Min":37},"FirstMessageMs":{"Count":1,"Max":4,"Sum":4,"Min":4}},"LocalBytes":37,"Name":"2","Push":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":37,"Sum":37,"History":[5,37,6,37],"Min":37},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3}}}],"Tasks":1,"InputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"UpdateTimeMs":5}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":331267,"ProcessCpuTimeUs":344,"Compilation":{"FromCache":false,"CpuTimeUs":283372,"DurationUs":289648}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-Rows":3,"A-SelfCpu":3.901,"A-Size":37,"A-Cpu":5.309,"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"A-Rows":3,"A-SelfCpu":1.408,"A-Size":37,"A-Cpu":1.408,"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"1","ReadRange":["Key [150, 266]"],"E-Cost":"0","ReadColumns":["Data","Key","Text"],"Name":"TableRangeScan","E-Size":"0","Table":"EightShard"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs-Stats [GOOD] Test command err: 2026-02-19T08:19:50.050496Z node 1 :METADATA_PROVIDER 
WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488138088194531:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:50.050552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:50.449817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:50.487778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:50.487885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:50.569406Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:50.575245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:50.726256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:50.738571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:50.738603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:50.738610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:50.738689Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:51.061831Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:51.318200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:51.366291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:51.461697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.756814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:52.104506Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:52.286152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.051367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488138088194531:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:55.051449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:55.086069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488159563032754:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:55.086209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:55.086758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488159563032765:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:55.086833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:55.429841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.475728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.563430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.645188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.712254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.773855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.840165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.933937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.077359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488163858000935:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.077443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.078083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488163858000940:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.078132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488163858000941:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.078247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.083454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:56.100429Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488163858000944:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:19:56.204187Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488163858000997:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":6,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"Tables":["EightShard"],"PlanNodeId":3,"Plans":[{"PlanNodeId":2 ... edPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 2026-02-19T08:20:33.107194Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608488322372899811:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:33.107265Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:33.205384Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:33.456878Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:33.462064Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608488322372899769:2081] 1771489233102030 != 1771489233102033 2026-02-19T08:20:33.497801Z node 5 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:33.534417Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:33.534534Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:33.592563Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:33.826504Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:33.826531Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:33.826543Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:33.826655Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:33.875866Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:34.133863Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:34.947855Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:38.113845Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608488322372899811:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:38.113936Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:38.820684Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488343847737132:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.820827Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.821334Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488343847737159:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.821413Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488343847737160:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.821513Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.826100Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:38.844002Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488343847737163:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:20:38.928090Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488343847737214:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:38.993008Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) PLAN::{"Plan":{"Plans":[{"Tables":["Destination"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Destination","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]},{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/Destination2"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/Destination2","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 
PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2026-02-19T08:20:33.951238Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488322623205517:2144];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:33.951398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:34.373859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:34.453183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:34.453803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:34.545945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:34.551813Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:34.553862Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488322623205411:2081] 1771489233946872 != 1771489233946875 2026-02-19T08:20:34.742898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:34.846372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/00320b/r3tmp/yandexYvYyef.tmp 2026-02-19T08:20:34.846397Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/00320b/r3tmp/yandexYvYyef.tmp 2026-02-19T08:20:34.846538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/00320b/r3tmp/yandexYvYyef.tmp 2026-02-19T08:20:34.846632Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:34.965930Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:10377 2026-02-19T08:20:35.181885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:35.190921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:35.208663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:20:35.252168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:20:38.033113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488344098042856:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.033398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.034030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488344098042868:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.034104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488344098042869:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.034295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:38.047629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:38.064324Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488344098042872:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-02-19T08:20:38.570886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:38.578460Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488344098042937:2589] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:38.800571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:38.969683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488322623205517:2144];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:38.969802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:38.982324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:39.029257Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488344098042998:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:20:39.030358Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZmUxMmMwMDYtODk3OTk1YWUtMzc0NDUyMTMtN2I2ZDIzMmY=, ActorId: [1:7608488339803075534:2329], ActorState: ExecuteState, LegacyTraceId: 01khtfs6zn2ntbsqwfmvhtt3fm, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:20:39.032702Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:35.230872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:35.230991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:35.231064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:35.231105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2026-02-19T08:20:35.231153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:35.231207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:35.231252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:35.231289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:35.231354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:35.231457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:35.232464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:35.232823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:35.330322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:35.330398Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:35.344513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:35.345258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:35.345467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:35.358936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:35.359676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:35.360467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:35.360840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:35.364294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:35.364502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:35.365863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:35.365932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:35.366046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-02-19T08:20:35.366096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:35.366139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:35.366332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:35.517814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.522407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.522644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.522723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.522814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.522901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.522987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.523083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.523158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.523222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.523300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.523403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.523558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:35.523637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... ocalPathId: 2] 2026-02-19T08:20:41.516127Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-02-19T08:20:41.516156Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:41.516179Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-02-19T08:20:41.516208Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:41.516233Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-02-19T08:20:41.516284Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:41.516317Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:20:41.516371Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 39] name: DirSub1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:20:41.516406Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:20:41.516999Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-02-19T08:20:41.517139Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-02-19T08:20:41.517181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:20:41.517232Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-02-19T08:20:41.517275Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:20:41.517346Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:20:41.517428Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2026-02-19T08:20:41.517493Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 
2026-02-19T08:20:41.517543Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-02-19T08:20:41.517589Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 0 2026-02-19T08:20:41.517630Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2026-02-19T08:20:41.531118Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusSuccess TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:41.531312Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2026-02-19T08:20:41.531597Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:41.531645Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:41.531883Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:41.531937Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 107, path id: 1 2026-02-19T08:20:41.532613Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:20:41.532767Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:20:41.532828Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2026-02-19T08:20:41.532878Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-02-19T08:20:41.532936Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:20:41.533067Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 0 2026-02-19T08:20:41.535669Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 TestModificationResult got TxId: 107, wait until txId: 107 2026-02-19T08:20:41.536528Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:41.536780Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 294us result status StatusSuccess 2026-02-19T08:20:41.537264Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:41.541032Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:41.541335Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 329us result status StatusSuccess 2026-02-19T08:20:41.541768Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: 
false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:41.542624Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:41.542734Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with error: Cannot find user 'user1', at schemeshard: 72057594046678944 >> TBackupCollectionTests::AlterTableInBackupCollectionProtection [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2026-02-19T08:20:32.927269Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488319512048571:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:32.927312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:33.417823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:33.463469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:33.463580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:33.648155Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:33.652377Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488319512048541:2081] 1771489232924680 != 1771489232924683 2026-02-19T08:20:33.673786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-02-19T08:20:33.681060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:34.016644Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:34.030852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:34.030874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:34.030881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:34.031013Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:34.042714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:13786 2026-02-19T08:20:34.428673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:34.449119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:34.545793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-02-19T08:20:34.565897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:20:37.370336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488340986885969:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:37.370566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:37.374095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488340986886002:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:37.374174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488340986886003:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:37.374342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:37.378687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:37.457450Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488340986886006:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-02-19T08:20:37.857901Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488340986886072:2593] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:37.901472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:37.929089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488319512048571:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:37.929173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:38.106137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:38.151706Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488340986886080:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:20:38.154171Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=N2U4YTJhOTUtMzZhYWUyZjAtOTlmZDhlNjUtNjZjMzM4N2Q=, ActorId: [1:7608488340986885964:2331], ActorState: ExecuteState, LegacyTraceId: 01khtfs6b12x6h7njnq3kn4ydr, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:20:38.156551Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:20:38.276245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> BSCMovePDisk::PDiskMove_Block42 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> BSCMovePDisk::PDiskMove_ErasureNone |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpStats::CreateTableAsStats-IsOlap [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> BSCMovePDisk::PDiskMove_Mirror3dc >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::AlterTableInBackupCollectionProtection [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:19:15.938787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:19:15.938884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:15.938950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:19:15.939009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:19:15.939049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:19:15.939085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:19:15.939114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:19:15.939165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:19:15.939246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:19:15.939322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:19:15.940225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:19:15.940537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:19:16.072972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:19:16.073050Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:16.104093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:19:16.109891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2026-02-19T08:19:16.110164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:19:16.138030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:19:16.138986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:19:16.139780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:19:16.140201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:19:16.150671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:16.150944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:19:16.152449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:19:16.152528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:19:16.152692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:19:16.152761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:19:16.152821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:19:16.153054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:19:16.335970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.337282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.337442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.337588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.337756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.337865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.338018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.338174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.338323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.338438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.338566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.338669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.338783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:19:16.338886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... eived event# 269551620, Sender [26:771:2755], Recipient [26:128:2152]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 771 RawX2: 111669152451 } Origin: 72075186233409546 State: 2 TxId: 108 Step: 0 Generation: 2 2026-02-19T08:20:42.766297Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5330: StateWork, processing event TEvDataShard::TEvSchemaChanged 2026-02-19T08:20:42.766477Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 771 RawX2: 111669152451 } Origin: 72075186233409546 State: 2 TxId: 108 Step: 0 Generation: 2 2026-02-19T08:20:42.766560Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409546, partId: 0 2026-02-19T08:20:42.766787Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Source { RawX1: 771 RawX2: 111669152451 } Origin: 72075186233409546 State: 2 TxId: 108 Step: 0 Generation: 2 2026-02-19T08:20:42.766930Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 108:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:20:42.767139Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 108:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 771 RawX2: 111669152451 } Origin: 72075186233409546 State: 2 TxId: 108 Step: 0 Generation: 2 2026-02-19T08:20:42.767284Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept 
TEvDataShard::TEvSchemaChanged, operationId: 108:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:42.767374Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:20:42.767472Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:20:42.767573Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 108:0 ProposedWaitParts -> Done 2026-02-19T08:20:42.767903Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:20:42.771241Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:20:42.771354Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:20:42.771608Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:20:42.771648Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:20:42.772011Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:20:42.772118Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:20:42.772198Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 108:0 2026-02-19T08:20:42.772379Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [26:771:2755] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 108 at schemeshard: 72057594046678944 2026-02-19T08:20:42.772794Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [26:128:2152], Recipient [26:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:20:42.772839Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:20:42.772891Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:20:42.772953Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 108:0 ProgressState 2026-02-19T08:20:42.773188Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:20:42.773235Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:20:42.773312Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone 
TxId: 108 ready parts: 1/1 2026-02-19T08:20:42.773391Z node 26 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:20:42.773467Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:20:42.773543Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-02-19T08:20:42.773658Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [26:708:2696] message: TxId: 108 2026-02-19T08:20:42.773770Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:20:42.773854Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-02-19T08:20:42.773921Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 108:0 2026-02-19T08:20:42.774140Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:20:42.775993Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:20:42.776141Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [26:708:2696] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 108 at schemeshard: 72057594046678944 2026-02-19T08:20:42.776403Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:20:42.776495Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [26:932:2900] 2026-02-19T08:20:42.776757Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [26:934:2902], Recipient [26:128:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:20:42.776797Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:20:42.776828Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 2026-02-19T08:20:42.777493Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [26:965:2933], Recipient [26:128:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/ProtectedTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-02-19T08:20:42.777592Z node 26 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:20:42.777795Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ProtectedTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:42.778165Z node 26 
:SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ProtectedTable" took 415us result status StatusSuccess 2026-02-19T08:20:42.778787Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ProtectedTable" PathDescription { Self { Name: "ProtectedTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "ProtectedTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "new_column" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] Test command err: RandomSeed# 9845968427652473870 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> TStateStorageRingGroupState::TestStateStorageDoubleReply >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |92.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::CreateTableAsStats-IsOlap [GOOD] Test command err: 2026-02-19T08:19:49.390452Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488134632572615:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:49.390525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:49.438462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:49.750059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:49.750181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:49.753949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:49.828071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:49.888790Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:49.894758Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488134632572389:2081] 1771489189335539 != 1771489189335542 2026-02-19T08:19:50.061940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:50.097947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:50.097988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:50.097998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:50.098091Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-02-19T08:19:50.385881Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:50.680893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:50.688002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:50.762550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:50.990808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.162820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.247817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.187936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488151812443445:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.188087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.188461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488151812443455:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.188501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:53.620107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.663115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.752831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.797422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.839463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.883512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:53.932298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:54.002693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:54.166852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488156107411624:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.166956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.167761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488156107411629:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.167808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488156107411630:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.167933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.172733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:54.218402Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488156107411633:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:19:54.281081Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488156107411684:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:54.390183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488134632572615:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:54.390286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... unt":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":8,"Max":8,"Min":8,"History":[11,8]}},"LocalBytes":8,"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":8,"Max":8,"Min":8,"History":[11,8]},"WaitTimeUs":{"Count":1,"Sum":1272,"Max":1272,"Min":1272,"History":[11,1272]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":10,"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1}}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":12638,"CpuTimeUs":6601},"ProcessCpuTimeUs":1111,"TotalDurationUs":381506,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","PlanNodeType":"Query"}} 2026-02-19T08:20:43.778906Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:43.787250Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 query_phases { duration_us: 17112 table_access { name: "/Root/.tmp/sessions/69290e53-42ac-11a5-a3db-e9991d15deb6/Root/Destination_80138cdd-45c6-f211-7681-4f8a1410e264" updates { rows: 2 bytes: 24 } partitions_count: 1 } table_access { name: "/Root/Source" reads { rows: 2 bytes: 24 } partitions_count: 1 } cpu_time_us: 1402 affected_shards: 1 } compilation { duration_us: 12638 cpu_time_us: 6601 } process_cpu_time_us: 1111 query_plan: "{\"Plan\":{\"Plans\":[{\"Tables\":[\"Destination\"],\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Source\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Source\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Source\",\"ReadColumns\":[\"Col1 (-\342\210\236, 
+\342\210\236)\",\"Col2\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[2,8]}},\"LocalBytes\":8,\"Name\":\"4\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[2,8]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1272,\"Max\":1272,\"Min\":1272,\"History\":[2,1272]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576]},\"Introspections\":[\"1 tasks default for source scan\"],\"WaitOutputTimeUs\":{\"Count\":1,\"Sum\":26,\"Max\":26,\"Min\":26,\"History\":[2,26]},\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Source\",\"ReadRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ReadBytes\":{\"Count\":1,\"Sum\":24,\"Max\":24,\"Min\":24}}],\"BaseTimeMs\":1771489243635,\"OutputBytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"CpuTimeUs\":{\"Count\":1,\"Sum\":800,\"Max\":800,\"Min\":800,\"History\":[2,800]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[2,64]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[2,64]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1086,\"Max\":1086,\"Min\":1086,\"History\":[2,1086]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":1}}],\"Node Type\":\"Map\",\"PlanNodeType\":\"Connection\"}],\"Node 
Type\":\"Stage\",\"Stats\":{\"Egress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[11,28]}},\"Name\":\"KqpTableSink\",\"Egress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[11,28]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":910,\"Max\":910,\"Min\":910,\"History\":[11,910]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/.tmp\\/sessions\\/69290e53-42ac-11a5-a3db-e9991d15deb6\\/Root\\/Destination_80138cdd-45c6-f211-7681-4f8a1410e264\"}],\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Introspections\":[\"1 tasks same as previous stage\"],\"EgressBytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"DurationUs\":{\"Count\":1,\"Sum\":9000,\"Max\":9000,\"Min\":9000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[11,1048576]},\"BaseTimeMs\":1771489243635,\"CpuTimeUs\":{\"Count\":1,\"Sum\":602,\"Max\":602,\"Min\":602,\"History\":[11,602]},\"EgressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"StageDurationUs\":9000,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[11,8]}},\"LocalBytes\":8,\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8,\"History\":[11,8]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1272,\"Max\":1272,\"Min\":1272,\"History\":[11,1272]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":10,\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/Destination\",\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":12638,\"CpuTimeUs\":6601},\"ProcessCpuTimeUs\":1111,\"TotalDurationUs\":381506,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"FillTable\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Source\" \'\"72057594046644480:42\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Col1\" \'\"Col2\") 
\'() (Void) \'()))\n(let $3 \'(\'(\'\"_logical_id\" \'474) \'(\'\"_id\" \'\"ca6c5cd9-1e774848-52c72cf0-dd755553\") \'(\'\"_wide_channels\" (StructType \'(\'\"Col1\" (DataType \'Uint64)) \'(\'\"Col2\" (OptionalType (DataType \'Int32)))))))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($10) (block \'(\n (let $11 (lambda \'($12) (Member $12 \'\"Col1\") (Member $12 \'\"Col2\")))\n (return (FromFlow (ExpandMap (ToFlow $10) $11)))\n))) $3))\n(let $5 (DqCnMap (TDqOutput $4 \'\"0\")))\n(let $6 \'\"/Root/.tmp/sessions/69290e53-42ac-11a5-a3db-e9991d15deb6/Root/Destination_80138cdd-45c6-f211-7681-4f8a1410e264\")\n(let $7 (KqpTable $6 \'\"\" \'\"\" \'\"\"))\n(let $8 (KqpTableSinkSettings $7 \'\"true\" \'\"fill_table\" \'\"0\" \'\"true\" \'\"false\" \'\"false\" \'() \'() \'(\'(\'\"OriginalPath\" \'\"/Root/Destination\"))))\n(let $9 (DqPhyStage \'($5) (lambda \'($13) (FromFlow (NarrowMap (ToFlow $13) (lambda \'($14 $15) (AsStruct \'(\'\"Col1\" $14) \'(\'\"Col2\" $15)))))) \'(\'(\'\"_logical_id\" \'539) \'(\'\"_id\" \'\"b9f614ef-8dc38d72-250e1532-b9dcc298\")) \'((DqSink \'\"0\" (DataSink \'\"KqpTableSink\" \'\"db\") $8))))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($4 $9) \'() \'() \'(\'(\'\"type\" \'\"generic\") \'(\'\"with_effects\")))) \'() \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 381506 total_cpu_time_us: 9114 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/.tmp/sessions/69290e53-42ac-11a5-a3db-e9991d15deb6/Root/Destination_80138cdd-45c6-f211-7681-4f8a1410e264\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":47},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Col1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":true,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Col2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Col1\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1771489243\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"cb71de14-a7ae5bb9-39c476-bea95653\",\"version\":\"1.0\"}" >> TStateStorageRingGroupState::TestProxyConfigMismatch >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder >> TStateStorageRingGroupState::TestStateStorageUpdateSig >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] Test command err: RandomSeed# 
12982870234274374053 2026-02-19T08:20:47.304409Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:250:21] SessionId# [1:136:3] Cookie# 1586017101447211811 2026-02-19T08:20:47.304467Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [1:136:3] Inserted# false Subscription# {SessionId# [1:136:3] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-02-19T08:20:47.310904Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 1586017101447211811 SessionId# [1:136:3] Binding# {4.4/1586017101447211811@[1:136:3]} Record# {RootNodeId: 6 } 2026-02-19T08:20:47.311040Z 1 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006647s 2026-02-19T08:20:47.311117Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:250:21] SessionId# [9:83:4] Cookie# 17516111358799494110 2026-02-19T08:20:47.311179Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [9:83:4] Inserted# false Subscription# {SessionId# [9:83:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:47.311253Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 17516111358799494110 SessionId# [9:83:4] Binding# {4.4/17516111358799494110@[9:83:4]} Record# {RootNodeId: 6 } 2026-02-19T08:20:47.311296Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [6:264:21] SessionId# [3:94:5] Cookie# 9109554507711531382 2026-02-19T08:20:47.311337Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [3:94:5] Inserted# false Subscription# {SessionId# [3:94:5] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-02-19T08:20:47.311630Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 6 Cookie# 9109554507711531382 SessionId# [3:94:5] Binding# {1.4/18247902309727842569@[3:134:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.7" Port: 19001 NodeId: 7 } Meta { Fingerprint: "\3403\207\365\032>ClusterStateGeneration=0 msgGeneration=0 Info->ClusterStateGuid=2 msgGuid=0 2026-02-19T08:20:47.433150Z 1 00h00m30.191997s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:47.433198Z 1 00h00m30.191997s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:47.433280Z 1 00h00m30.191997s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:47.433368Z 1 00h00m30.191997s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.433446Z 1 00h00m30.191997s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 0 2026-02-19T08:20:47.433521Z 1 00h00m30.191997s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 2 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] 
CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.433553Z 1 00h00m30.191997s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=0 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-02-19T08:20:47.433690Z 1 00h00m30.191997s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 2 2026-02-19T08:20:47.442950Z 1 00h00m38.426939s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.443041Z 1 00h00m38.426939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:47.443092Z 1 00h00m38.426939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:47.443126Z 1 00h00m38.426939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:47.443156Z 1 00h00m38.426939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.443184Z 1 00h00m38.426939s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.443245Z 1 00h00m38.426939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.443304Z 1 00h00m38.426939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.443335Z 1 00h00m38.426939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.443387Z 1 00h00m38.426939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.443438Z 1 00h00m38.426939s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.445882Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.445976Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:47.446023Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:47.446057Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:47.446086Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:47.446120Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:47.446168Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] 
CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.446218Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.446282Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.456044Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.456187Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:47.456255Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:47.456311Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:47.456378Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:47.456427Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:47.456492Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.456580Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.456623Z 1 00h00m50.300000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=0 2026-02-19T08:20:47.456710Z 1 00h00m50.300000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 >> TStateStorageRingGroupState::TestBoardConfigMismatch >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2026-02-19T08:18:06.658854Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1771489086658815 2026-02-19T08:18:07.251887Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487697611881617:2205];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:07.252150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:18:07.349612Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:18:07.433296Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:18:07.862865Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:07.862993Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:18:07.912736Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:07.992826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:07.992928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:08.009863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:08.009994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:08.038078Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:18:08.038224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:08.070558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:08.233160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:08.235933Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:08.256853Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:08.276269Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:08.424586Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.013459s 2026-02-19T08:18:08.426370Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:08.670441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/00394b/r3tmp/yandexUtYTbs.tmp 2026-02-19T08:18:08.670470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: /home/runner/.ya/build/build_root/ynks/00394b/r3tmp/yandexUtYTbs.tmp 2026-02-19T08:18:08.670652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/00394b/r3tmp/yandexUtYTbs.tmp 2026-02-19T08:18:08.670737Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:18:08.771784Z INFO: TTestServer started on Port 11186 GrpcPort 5036 PQClient connected to localhost:5036 2026-02-19T08:18:09.145002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:09.389921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-02-19T08:18:12.249969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487697611881617:2205];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:12.250066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:12.785978Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487719545981082:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.786059Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487719545981058:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.786185Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.795636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487719086719205:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.795794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.800314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487719086719240:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.800392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487719086719241:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.800536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.800771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:12.822690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487719086719277:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.822782Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.824090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487719086719279:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.824145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.842858Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487719545981116:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.842959Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.843337Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608487719545981118:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.843393Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:12.875489Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487719086719246:2910] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:18:12.908958Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487719086719245:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-02-19T08:18:12.909309Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608487719545981089:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-02-19T08:18:12.991100Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487719086719326:2966] txid# 281474976715662, issues: { message: "Check failed: path: \ ... actors on pipe 2026-02-19T08:20:42.476547Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-02-19T08:20:42.476586Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-02-19T08:20:42.476675Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|3c064079-b52644a6-11dc40d9-766e2442_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2026-02-19T08:20:42.476717Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:20:42.476735Z node 16 :PERSQUEUE DEBUG: partition.cpp:2452: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-02-19T08:20:42.476766Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:20:42.476782Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:20:42.476807Z node 16 :PERSQUEUE DEBUG: partition.cpp:2516: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-02-19T08:20:42.476838Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:20:42.476856Z node 16 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:20:42.476874Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:20:42.476917Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2026-02-19T08:20:42.476980Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-02-19T08:20:42.476216Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2026-02-19T08:20:42.479247Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-02-19T08:20:42.479285Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-02-19T08:20:42.479364Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-02-19T08:20:42.480041Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:884: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|3c064079-b52644a6-11dc40d9-766e2442_0 2026-02-19T08:20:42.482752Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1771489242482 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:20:42.482916Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|3c064079-b52644a6-11dc40d9-766e2442_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-02-19T08:20:42.483185Z :INFO: [] MessageGroupId [src] SessionId [src|3c064079-b52644a6-11dc40d9-766e2442_0] Write session: close. Timeout = 0 ms 2026-02-19T08:20:42.483246Z :INFO: [] MessageGroupId [src] SessionId [src|3c064079-b52644a6-11dc40d9-766e2442_0] Write session will now close 2026-02-19T08:20:42.483301Z :DEBUG: [] MessageGroupId [src] SessionId [src|3c064079-b52644a6-11dc40d9-766e2442_0] Write session: aborting 2026-02-19T08:20:42.483950Z :INFO: [] MessageGroupId [src] SessionId [src|3c064079-b52644a6-11dc40d9-766e2442_0] Write session: gracefully shut down, all writes complete 2026-02-19T08:20:42.484005Z :DEBUG: [] MessageGroupId [src] SessionId [src|3c064079-b52644a6-11dc40d9-766e2442_0] Write session: destroy 2026-02-19T08:20:42.485025Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|3c064079-b52644a6-11dc40d9-766e2442_0 grpc read done: success: 0 data: 2026-02-19T08:20:42.485062Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|3c064079-b52644a6-11dc40d9-766e2442_0 grpc read failed 2026-02-19T08:20:42.485106Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|3c064079-b52644a6-11dc40d9-766e2442_0 grpc closed 2026-02-19T08:20:42.485145Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|3c064079-b52644a6-11dc40d9-766e2442_0 is DEAD 2026-02-19T08:20:42.486083Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:20:42.490437Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037892] server disconnected, pipe [15:7608488363049073144:2479] destroyed 2026-02-19T08:20:42.490503Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:20:42.490530Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:20:42.490549Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:42.490564Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:20:42.490584Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:42.490600Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:20:42.542426Z :INFO: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Starting read session 2026-02-19T08:20:42.542485Z :DEBUG: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Starting cluster discovery 2026-02-19T08:20:42.542767Z :INFO: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26855: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26855
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26855. " 2026-02-19T08:20:42.542816Z :DEBUG: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Restart cluster discovery in 0.006210s 2026-02-19T08:20:42.550323Z :DEBUG: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Starting cluster discovery 2026-02-19T08:20:42.550740Z :INFO: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26855: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26855
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26855. " 2026-02-19T08:20:42.550808Z :DEBUG: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Restart cluster discovery in 0.012525s 2026-02-19T08:20:42.549816Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:20:42.549860Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:42.549878Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:20:42.549899Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:20:42.549915Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:20:42.565926Z :DEBUG: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Starting cluster discovery 2026-02-19T08:20:42.566182Z :INFO: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26855: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26855
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26855. " 2026-02-19T08:20:42.566222Z :DEBUG: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Restart cluster discovery in 0.027287s 2026-02-19T08:20:42.596849Z :DEBUG: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Starting cluster discovery 2026-02-19T08:20:42.597169Z :NOTICE: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26855: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26855
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26855. " } 2026-02-19T08:20:42.597858Z :NOTICE: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26855: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26855
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:26855. " } 2026-02-19T08:20:42.598018Z :INFO: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Closing read session. Close timeout: 0.000000s 2026-02-19T08:20:42.598151Z :NOTICE: [/Root] [/Root] [b561cf00-894a2bc2-88bcefad-dcc6ddf6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:20:43.156311Z node 15 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1270} ActorId: [15:7608488367344040480:2489] TxId: 281474976710677. Ctx: { TraceId: 01khtfsbb665b6v8s20f37bzr1, Database: /Root, SessionId: ydb://session/3?node_id=15&id=YjljMzY4YzctOGMzZGQ2N2EtMjBkYTYxMTAtNmQxOWFhYjU=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 16 trace_id# 2026-02-19T08:20:43.156476Z node 15 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [15:7608488367344040484:2489], TxId: 281474976710677, task: 3. Ctx: { TraceId : 01khtfsbb665b6v8s20f37bzr1. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=15&id=YjljMzY4YzctOGMzZGQ2N2EtMjBkYTYxMTAtNmQxOWFhYjU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [15:7608488367344040480:2489], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] Test command err: RandomSeed# 5419771003626709741 2026-02-19T08:20:47.566569Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [5:257:21] SessionId# [2:114:4] Cookie# 17320745974943075299 2026-02-19T08:20:47.566680Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [2:114:4] Inserted# false Subscription# {SessionId# [2:114:4] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-02-19T08:20:47.578107Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 5 Cookie# 17320745974943075299 SessionId# [2:114:4] Binding# {6.0/10408374713548817931@[2:117:5]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032> Record# {CacheUpdate { } } RootNodeId# 2 StorageConfigGeneration# 0 KnownNode# true 2026-02-19T08:20:47.599173Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [2:114:4] Inserted# false Subscription# {SessionId# [2:114:4] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-02-19T08:20:47.599242Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:257:21] SessionId# [1:139:4] Cookie# 4267831785508015593 2026-02-19T08:20:47.599301Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [1:139:4] Inserted# false Subscription# {SessionId# [1:139:4] SubscriptionCookie# 0} NextSubscribeCookie# 7 2026-02-19T08:20:47.599367Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 4267831785508015593 SessionId# [1:139:4] Binding# {5.2/4267831785508015593@[1:139:4]} Record# {RootNodeId: 2 CacheUpdate { } } 2026-02-19T08:20:47.599446Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:257:21] SessionId# [9:64:5] Cookie# 5084358797545813001 2026-02-19T08:20:47.599499Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [9:64:5] Inserted# false Subscription# {SessionId# [9:64:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:47.599557Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 5084358797545813001 SessionId# [9:64:5] Binding# {5.2/5084358797545813001@[9:64:5]} Record# {RootNodeId: 2 CacheUpdate { } } 2026-02-19T08:20:47.599596Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:243:21] SessionId# [7:98:3] Cookie# 16584110757472259003 2026-02-19T08:20:47.599625Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [7:98:3] Inserted# false Subscription# {SessionId# [ ... 
ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.649663Z 1 00h00m00.927721s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.649691Z 1 00h00m00.927721s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.649735Z 1 00h00m00.927721s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.649781Z 1 00h00m00.927721s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.651831Z 1 00h00m02.047876s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.651901Z 1 00h00m02.047876s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:47.651955Z 1 00h00m02.047876s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:47.651998Z 1 00h00m02.047876s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:47.652030Z 1 00h00m02.047876s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.652076Z 1 00h00m02.047876s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.652136Z 1 00h00m02.047876s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.652175Z 1 00h00m02.047876s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.652202Z 1 00h00m02.047876s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.652235Z 1 00h00m02.047876s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.652262Z 1 00h00m02.047876s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.654428Z 1 00h00m04.377798s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.654518Z 1 00h00m04.377798s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:47.654561Z 1 00h00m04.377798s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:47.654591Z 1 00h00m04.377798s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:47.654620Z 1 
00h00m04.377798s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.654653Z 1 00h00m04.377798s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.654688Z 1 00h00m04.377798s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.654742Z 1 00h00m04.377798s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.654771Z 1 00h00m04.377798s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.654866Z 1 00h00m04.377798s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.654896Z 1 00h00m04.377798s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.659400Z 1 00h00m09.037642s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.659524Z 1 00h00m09.037642s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:47.659574Z 1 00h00m09.037642s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:47.659605Z 1 00h00m09.037642s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:47.659635Z 1 00h00m09.037642s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.659670Z 1 00h00m09.037642s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.659728Z 1 00h00m09.037642s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.659771Z 1 00h00m09.037642s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.659802Z 1 00h00m09.037642s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.659832Z 1 00h00m09.037642s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.659872Z 1 00h00m09.037642s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.661781Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-02-19T08:20:47.661890Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup 
TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:47.661964Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:47.661998Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:47.662048Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:47.662094Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:47.662219Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662280Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662341Z 1 00h00m10.002048s :STATESTORAGE ERROR: TStateStorageProxyRequest::MergeReply duplicated TEvReplicaInfo cookie:0 replica:[1:24343667:0] signature:154 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662397Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662453Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662492Z 1 00h00m10.002048s :STATESTORAGE ERROR: TStateStorageProxyRequest::MergeReply duplicated TEvReplicaInfo cookie:1 replica:[1:1099535971443:0] signature:155 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662529Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662588Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662660Z 1 00h00m10.002048s :STATESTORAGE ERROR: TStateStorageProxyRequest::HandleUpdateSig duplicated TEvReplicaInfo cookie:2 replica:[1:2199047599219:0] signature:156 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] 
CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662760Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662801Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662841Z 1 00h00m10.002048s :STATESTORAGE ERROR: TStateStorageProxyRequest::HandleUpdateSig duplicated TEvReplicaInfo cookie:3 replica:[1:3298559226995:0] signature:157 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.662869Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] Test command err: RandomSeed# 17670172796547018138 2026-02-19T08:20:47.704080Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:229:37] SessionId# [7:146:1] Cookie# 4811782357968385523 2026-02-19T08:20:47.704180Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:47.711924Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 4811782357968385523 SessionId# [7:146:1] Binding# {1.8/4811782357968385523@[7:146:1]} Record# {RootNodeId: 7 } 2026-02-19T08:20:47.712070Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {1.8/4811782357968385523@[7:146:1]} Reason# binding cycle 2026-02-19T08:20:47.712134Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {1.8/4811782357968385523@[7:146:1]} 2026-02-19T08:20:47.712232Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} 2026-02-19T08:20:47.712288Z 7 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008271s 2026-02-19T08:20:47.712393Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:229:37] SessionId# [9:152:1] Cookie# 10992895103818595 2026-02-19T08:20:47.712441Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [9:152:1] Inserted# false Subscription# {SessionId# [9:152:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:47.712524Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} 
TEvNodeConfigReversePush NodeId# 1 Cookie# 10992895103818595 SessionId# [9:152:1] Binding# {1.8/10992895103818595@[9:152:1]} Record# {RootNodeId: 7 } 2026-02-19T08:20:47.712573Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:229:37] SessionId# [2:131:1] Cookie# 17839301196863990823 2026-02-19T08:20:47.712605Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-02-19T08:20:47.712658Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 17839301196863990823 SessionId# [2:131:1] Binding# {1.8/17839301196863990823@[2:131:1]} Record# {RootNodeId: 7 } 2026-02-19T08:20:47.712716Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:229:37] SessionId# [4:137:1] Cookie# 2019441261598451441 2026-02-19T08:20:47.712748Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [4:137:1] Inserted# false Subscription# {SessionId# [4:137:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:47.712792Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 2019441261598451441 SessionId# [4:137:1] Binding# {1.8/2019441261598451441@[4:137:1]} Record# {RootNodeId: 7 } 2026-02-19T08:20:47.713094Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [8:278:21] SessionId# [7:28:7] Cookie# 5599632622611528568 2026-02-19T08:20:47.713138Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 8 SessionId# [7:28:7] Inserted# false Subscription# {SessionId# [7:28:7] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:47.713173Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 8 Cookie# 5599632622611528568 SessionId# [7:28:7] Binding# 2026-02-19T08:20:47.713227Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 8 Reason# explicit unbind request 2026-02-19T08:20:47.713278Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.6:19001/6 2026-02-19T08:20:47.713339Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.7:19001/7 2026-02-19T08:20:47.713375Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.9:19001/9 2026-02-19T08:20:47.713416Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.4:19001/4 2026-02-19T08:20:47.713442Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.1:19001/1 2026-02-19T08:20:47.713466Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.8:19001/8 2026-02-19T08:20:47.713488Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.2:19001/2 2026-02-19T08:20:47.713512Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.5:19001/5 2026-02-19T08:20:47.713535Z 7 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.3:19001/3 2026-02-19T08:20:47.713595Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 8 Subscription# {SessionId# [7:28:7] SubscriptionCookie# 0} 2026-02-19T08:20:47.713682Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:47.713796Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 4 Binding# {4.0/4811782357968385524@[0:0:0]} SessionId# [0:0:0] 2026-02-19T08:20:47.713862Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [8:278:21] SessionId# [1:148:7] Cookie# 12625850840218301431 2026-02-19T08:20:47.713915Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 8 SessionId# [1:148:7] Inserted# false Subscription# {SessionId# [1:148:7] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-02-19T08:20:47.713978Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 8 Cookie# 12625850840218301431 SessionId# [1:148:7] Binding# {8.7/12625850840218301431@[1:148:7]} Record# {RootNodeId: 8 } 2026-02-19T08:20:47.714443Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [7:271:21] SessionId# [1:145:6] Cookie# 4811782357968385523 2026-02-19T08:20:47.714485Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [1:145:6] Inserted# false Subscription# {SessionId# [1:145:6] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-02-19T08:20:47.714528Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 7 Cookie# 4811782357968385523 SessionId# [1:145:6] Binding# {8.8/12625850840218301431@[1:148:7]} 2026-02-19T08:20:47.714577Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 7 Reason# explicit unbind request 2026-02-19T08:20:47.714613Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.6:19001/6 2026-02-19T08:20:47.714651Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.7:19001/7 2026-02-19T08:20:47.714686Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.9:19001/9 2026-02-19T08:20:47.714731Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.4:19001/4 2026-02-19T08:20:47.714760Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.1:19001/1 2026-02-19T08:20:47.714791Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.8:19001/8 2026-02-19T08:20:47.714818Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.2:19001/2 2026-02-19T08:20:47.714848Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.5:19001/5 2026-02-19T08:20:47.714885Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.3:19001/3 
2026-02-19T08:20:47.714939Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 7 Subscription# {SessionId# [1:145:6] SubscriptionCookie# 0} 2026-02-19T08:20:47.715029Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [7:271:21] SessionId# [8:29:7] Cookie# 5599632622611528568 2026-02-19T08:20:47.715082Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 7 Cookie# 5599632622611528568 SessionId# [8:29:7] Binding# Record# {RootNodeId: 7 } 2026-02-19T08:20:47.715123Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:236:21] SessionId# [3:109:2] Cookie# 11503050864718271381 2026-02-19T08:20:47.715176Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:47.715227Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 11503050864718271381 SessionId# [3:109:2] Binding# {2.8/11503050864718271381@[3:109:2]} Record# {RootNodeId: 7 } 2026-02-19T08:20:47.715272Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:236:21] SessionId# [5:115:2] Cookie# 1240581038276695356 2026-02-19T08:20:47.715305Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [5:115:2] Inserted# false Subscription# {SessionId# [5:115:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:47.715347Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 1240581038276695356 SessionId# [5:115:2] Binding# {2.8/1240581038276695356@[5:115:2]} Record# {RootNodeId: 7 } 2026-02-19T08:20:47.715404Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [7:77:4] SessionId# [0:0:0] Cookie# 6 2026-02-19T08:20:47.715474Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 4 SessionId# [7:77:4] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2026-02-19T08:20:47.715542Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {4.0/4811782357968385524@[0:0:0]} 2026-02-19T08:20:47.716744Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:229:37] SessionId# [7:146:1] Cookie# 4811782357968385523 2026-02-19T08:20:47.716835Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 4811782357968385523 SessionId# [7:146:1] Binding# {4.0/4811782357968385524@[7:77:4]} Record# {RootNodeId: 8 } 2026-02-19T08:20:47.716891Z 9 00h00m00.000000s :BS_NODE DEBUG: {NW ... 
19T08:20:47.788548Z 1 00h00m01.545444s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.788593Z 1 00h00m01.545444s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.788635Z 1 00h00m01.545444s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.788694Z 1 00h00m01.545444s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.788723Z 1 00h00m01.545444s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.788751Z 1 00h00m01.545444s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.788783Z 1 00h00m01.545444s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.790600Z 1 00h00m03.355439s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.790666Z 1 00h00m03.355439s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:47.790704Z 1 00h00m03.355439s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:47.790737Z 1 00h00m03.355439s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:47.790823Z 1 00h00m03.355439s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.790857Z 1 00h00m03.355439s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.790899Z 1 00h00m03.355439s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.790950Z 1 00h00m03.355439s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.790985Z 1 00h00m03.355439s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.791020Z 1 00h00m03.355439s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.791057Z 1 00h00m03.355439s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.796769Z 1 00h00m07.446027s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.796876Z 1 00h00m07.446027s :STATESTORAGE DEBUG: Replica::Handle 
ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:47.796932Z 1 00h00m07.446027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:47.796969Z 1 00h00m07.446027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:47.796999Z 1 00h00m07.446027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.797030Z 1 00h00m07.446027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.797094Z 1 00h00m07.446027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.797144Z 1 00h00m07.446027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.797174Z 1 00h00m07.446027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.797205Z 1 00h00m07.446027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.797243Z 1 00h00m07.446027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.800613Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-02-19T08:20:47.800740Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:47.800817Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:47.800852Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:47.800907Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:47.800938Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:47.800998Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.801083Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.801118Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.801228Z 1 00h00m10.002048s 
:STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.801327Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 10 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:47.801370Z 1 00h00m10.002048s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=10 Info->ClusterStateGuid=0 clusterStateGuid=0 2026-02-19T08:20:47.801472Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 10 ClusterStateGuid: 0 2026-02-19T08:20:47.807363Z 1 00h00m16.527132s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.807489Z 1 00h00m16.527132s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:47.807550Z 1 00h00m16.527132s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:47.807585Z 1 00h00m16.527132s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:47.807634Z 1 00h00m16.527132s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.807668Z 1 00h00m16.527132s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.807734Z 1 00h00m16.527132s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.807797Z 1 00h00m16.527132s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.807824Z 1 00h00m16.527132s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.807859Z 1 00h00m16.527132s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.807901Z 1 00h00m16.527132s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.825813Z 1 00h00m35.234208s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:47.825894Z 1 00h00m35.234208s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:47.825945Z 1 00h00m35.234208s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:47.825966Z 1 00h00m35.234208s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 
2026-02-19T08:20:47.826016Z 1 00h00m35.234208s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:47.826058Z 1 00h00m35.234208s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:47.826113Z 1 00h00m35.234208s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.826164Z 1 00h00m35.234208s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.826195Z 1 00h00m35.234208s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.826226Z 1 00h00m35.234208s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:47.826299Z 1 00h00m35.234208s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] Test command err: RandomSeed# 11207820335400234007 2026-02-19T08:20:47.743957Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:236:21] SessionId# [4:112:2] Cookie# 9279436693728767978 2026-02-19T08:20:47.744035Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:47.750581Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 9279436693728767978 SessionId# [4:112:2] Binding# {2.0/9279436693728767978@[4:112:2]} Record# {RootNodeId: 1 } 2026-02-19T08:20:47.750687Z 4 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006774s 2026-02-19T08:20:47.750747Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:236:21] SessionId# [8:124:2] Cookie# 17873459948224825880 2026-02-19T08:20:47.750803Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [8:124:2] Inserted# false Subscription# {SessionId# [8:124:2] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-02-19T08:20:47.750863Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 17873459948224825880 SessionId# [8:124:2] Binding# {2.1/17873459948224825880@[8:124:2]} Record# {RootNodeId: 8 } 2026-02-19T08:20:47.750901Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {2.1/17873459948224825880@[8:124:2]} Reason# binding cycle 2026-02-19T08:20:47.750958Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {2.1/17873459948224825880@[8:124:2]} 2026-02-19T08:20:47.751015Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect 
NodeId# 2 Subscription# {SessionId# [8:124:2] SubscriptionCookie# 0} 2026-02-19T08:20:47.751070Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-02-19T08:20:47.751113Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 2 Binding# {2.0/17873459948224825881@[0:0:0]} SessionId# [0:0:0] 2026-02-19T08:20:47.751153Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:236:21] SessionId# [9:127:2] Cookie# 5629826716753675916 2026-02-19T08:20:47.751176Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [9:127:2] Inserted# false Subscription# {SessionId# [9:127:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:47.751206Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 5629826716753675916 SessionId# [9:127:2] Binding# {2.1/5629826716753675916@[9:127:2]} Record# {RootNodeId: 8 } 2026-02-19T08:20:47.751243Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:236:21] SessionId# [3:109:2] Cookie# 16093979552028232137 2026-02-19T08:20:47.751268Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:47.751310Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 16093979552028232137 SessionId# [3:109:2] Binding# {2.1/16093979552028232137@[3:109:2]} Record# {RootNodeId: 8 } 2026-02-19T08:20:47.751340Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:236:21] SessionId# [4:112:2] Cookie# 9279436693728767978 2026-02-19T08:20:47.751373Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:47.751420Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 9279436693728767978 SessionId# [4:112:2] Binding# {2.1/9279436693728767978@[4:112:2]} Record# {RootNodeId: 8 } 2026-02-19T08:20:47.751511Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:229:37] SessionId# [7:146:1] Cookie# 16582445318812385195 2026-02-19T08:20:47.751783Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 16582445318812385195 SessionId# [7:146:1] Binding# {4.4/1706549218233813655@[7:77:4]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorage2RingGroups::TestStateStorageReplyOnce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] Test command err: RandomSeed# 7000693886352518088 2026-02-19T08:20:47.971779Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:250:21] SessionId# [5:71:4] Cookie# 14698088284325781452 
2026-02-19T08:20:47.971877Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [5:71:4] Inserted# false Subscription# {SessionId# [5:71:4] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:47.979899Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 14698088284325781452 SessionId# [5:71:4] Binding# {4.3/14698088284325781452@[5:71:4]} Record# {RootNodeId: 1 } 2026-02-19T08:20:47.980039Z 5 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008321s 2026-02-19T08:20:47.980112Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [5:257:21] SessionId# [4:70:4] Cookie# 14698088284325781452 2026-02-19T08:20:47.980197Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [4:70:4] Inserted# false Subscription# {SessionId# [4:70:4] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:47.980420Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 5 Cookie# 14698088284325781452 SessionId# [4:70:4] Binding# {3.1/15777961626140649278@[4:89:3]} Record# {BoundNodes { NodeId { Host: "127.0.0.4" Port: 19001 NodeId: 4 } Meta { Fingerprint: "\3403\207\365\032> Record# {BoundNodes { NodeId { Host: "127.0.0.3" Port: 19001 NodeId: 3 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.4" Port: 19001 NodeId: 4 } DeletedBoundNodeIds { Host: "127.0.0.8" Port: 19001 NodeId: 8 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 1 StorageConfigGe ... 
leLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.065457Z 1 00h00m01.764061s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.065491Z 1 00h00m01.764061s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.065522Z 1 00h00m01.764061s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.065556Z 1 00h00m01.764061s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.067672Z 1 00h00m03.659031s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.067751Z 1 00h00m03.659031s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.067816Z 1 00h00m03.659031s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.067856Z 1 00h00m03.659031s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.067889Z 1 00h00m03.659031s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.067963Z 1 00h00m03.659031s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.068000Z 1 00h00m03.659031s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.068054Z 1 00h00m03.659031s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.068087Z 1 00h00m03.659031s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.068136Z 1 00h00m03.659031s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.068176Z 1 00h00m03.659031s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.072311Z 1 00h00m07.903763s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.072412Z 1 00h00m07.903763s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.072449Z 1 00h00m07.903763s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.072480Z 1 00h00m07.903763s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 
2026-02-19T08:20:48.072499Z 1 00h00m07.903763s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.072543Z 1 00h00m07.903763s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.072580Z 1 00h00m07.903763s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.072616Z 1 00h00m07.903763s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.072634Z 1 00h00m07.903763s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.072651Z 1 00h00m07.903763s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.072668Z 1 00h00m07.903763s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.074884Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.074967Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:48.075010Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.075040Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:48.075085Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:48.075109Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:48.075161Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.075203Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.075228Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.079985Z 1 00h00m17.327068s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.080108Z 1 00h00m17.327068s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.080162Z 1 00h00m17.327068s :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.080197Z 1 00h00m17.327068s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.080224Z 1 00h00m17.327068s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.080249Z 1 00h00m17.327068s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.080297Z 1 00h00m17.327068s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.080358Z 1 00h00m17.327068s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 1 ClusterStateGuid: 2} 2026-02-19T08:20:48.080407Z 1 00h00m17.327068s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-02-19T08:20:48.080513Z 1 00h00m17.327068s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2026-02-19T08:20:48.083899Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.084030Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:48.084098Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.084133Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:48.084171Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:48.084205Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:48.084316Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.084391Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 2 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.084422Z 1 00h00m20.100000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2026-02-19T08:20:48.084529Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2026-02-19T08:20:48.084647Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:48.084691Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 
msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-02-19T08:20:48.084757Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-02-19T08:20:48.084862Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaUpdate TabletID: 72057594037932033} 2026-02-19T08:20:48.084924Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-02-19T08:20:48.085002Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-02-19T08:20:48.088633Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: NKikimrStateStorage.TEvCleanup TabletID: 72057594037932033 ProposedLeader { RawX1: 0 RawX2: 0 } ClusterStateGeneration: 3 ClusterStateGuid: 4 2026-02-19T08:20:48.088726Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2026-02-19T08:20:48.088812Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] Test command err: RandomSeed# 1923581781412593143 2026-02-19T08:20:48.181266Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [9:152:1] SessionId# [0:0:0] Cookie# 8 2026-02-19T08:20:48.181331Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 1 SessionId# [9:152:1] Cookie# 8 CookieInFlight# true SubscriptionExists# true 2026-02-19T08:20:48.181396Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [9:300:21] SessionId# [7:31:8] Cookie# 585564180403048894 2026-02-19T08:20:48.188557Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 9 Cookie# 585564180403048894 SessionId# [7:31:8] Binding# {3.1/14792499989560564486@[7:98:3]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.9" Port: 19001 NodeId: 9 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] Test command err: RandomSeed# 328340531003144637 2026-02-19T08:20:48.212111Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [3:243:21] SessionId# [2:108:2] Cookie# 1234251888188065174 2026-02-19T08:20:48.212182Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:48.212241Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 3 Cookie# 1234251888188065174 SessionId# [2:108:2] Binding# {1.3/1579977949866348666@[2:131:1]} 2026-02-19T08:20:48.212309Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode 
NodeId# 3 Reason# explicit unbind request 2026-02-19T08:20:48.212347Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.6:19001/6 2026-02-19T08:20:48.212399Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.7:19001/7 2026-02-19T08:20:48.212424Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.9:19001/9 2026-02-19T08:20:48.212457Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.4:19001/4 2026-02-19T08:20:48.212488Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.1:19001/1 2026-02-19T08:20:48.212523Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.8:19001/8 2026-02-19T08:20:48.212551Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.2:19001/2 2026-02-19T08:20:48.212588Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.5:19001/5 2026-02-19T08:20:48.212616Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.3:19001/3 2026-02-19T08:20:48.212680Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 3 Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} 2026-02-19T08:20:48.212771Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:243:21] SessionId# [1:133:2] Cookie# 6370056034412260665 2026-02-19T08:20:48.219583Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 6370056034412260665 SessionId# [1:133:2] Binding# Record# {RootNodeId: 3 } 2026-02-19T08:20:48.219673Z 1 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006902s 2026-02-19T08:20:48.219737Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:257:21] SessionId# [7:58:5] Cookie# 2420991879635598527 2026-02-19T08:20:48.219782Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [7:58:5] Inserted# false Subscription# {SessionId# [7:58:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:48.219850Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 2420991879635598527 SessionId# [7:58:5] Binding# {5.1/2420991879635598527@[7:58:5]} Record# {RootNodeId: 3 } 2026-02-19T08:20:48.219928Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:257:21] SessionId# [6:55:5] Cookie# 5101168343881082290 2026-02-19T08:20:48.219966Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [6:55:5] Inserted# false Subscription# {SessionId# [6:55:5] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-02-19T08:20:48.220024Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 5101168343881082290 SessionId# [6:55:5] Binding# {5.1/5101168343881082290@[6:55:5]} Record# {RootNodeId: 3 } 2026-02-19T08:20:48.220079Z 3 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:229:37] SessionId# [3:134:1] Cookie# 6370056034412260665 2026-02-19T08:20:48.220110Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:48.220140Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 6370056034412260665 SessionId# [3:134:1] Binding# 2026-02-19T08:20:48.220175Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2026-02-19T08:20:48.220208Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2026-02-19T08:20:48.220249Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2026-02-19T08:20:48.220285Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2026-02-19T08:20:48.220327Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2026-02-19T08:20:48.220353Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2026-02-19T08:20:48.220381Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2026-02-19T08:20:48.220406Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2026-02-19T08:20:48.220430Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2026-02-19T08:20:48.220453Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2026-02-19T08:20:48.220510Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} 2026-02-19T08:20:48.220602Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:48.220658Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 2 Binding# {2.0/1234251888188065175@[0:0:0]} SessionId# [0:0:0] 2026-02-19T08:20:48.220736Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:229:37] SessionId# [2:131:1] Cookie# 1579977949866348666 2026-02-19T08:20:48.220784Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:48.220866Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 1579977949866348666 SessionId# [2:131:1] Binding# {1.3/1579977949866348666@[2:131:1]} Record# {RootNodeId: 1 } 2026-02-19T08:20:48.221319Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [3:109:2] SessionId# [0:0:0] Cookie# 6 2026-02-19T08:20:48.221366Z 3 
00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 2 SessionId# [3:109:2] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2026-02-19T08:20:48.221410Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {2.0/1234251888188065175@[0:0:0]} 2026-02-19T08:20:48.222370Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:236:21] SessionId# [1:130:1] Cookie# 1579977949866348666 2026-02-19T08:20:48.222436Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-02-19T08:20:48.222575Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 1579977949866348666 SessionId# [1:130:1] Binding# Record# {DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 1 StorageConfigGeneration# 0 KnownNode# true 2026-02-19T08:20:48.222623Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-02-19T08:20:48.222665Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.1:19001/1 2026-02-19T08:20:48.222708Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.3:19001/3 2026-02-19T08:20:48.222842Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 4 2026-02-19T08:20:48.222904Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 3 Binding# {3.0/6370056034412260666@[0:0:0]} SessionId# [0:0:0] 2026-02-19T08:20:48.222954Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [6:264:21] SessionId# [8:44:6] Cookie# 9517944803916860623 2026-02-19T08:20:48.223004Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [8:44:6] Inserted# false Subscription# {SessionId# [8:44:6] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:48.223073Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 6 Cookie# 9517944803916860623 SessionId# [8:44:6] Binding# {6.1/9517944803916860623@[8:44:6]} Record# {RootNodeId: 3 } 2026-02-19T08:20:48.223116Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [6:264:21] SessionId# [9:47:6] Cookie# 860574257095727242 2026-02-19T08:20:48.223146Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 6 SessionId# [9:47:6] Inserted# false Subscription# {SessionId# [9:47:6] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:48.223186Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 6 Cookie# 860574257095727242 SessionId# [9:47:6] Binding# {6.1/860574257095727242@[9:47:6]} Record# {RootNodeId: 3 } 2026-02-19T08:20:48.223268Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} 
StateFunc Type# 268639239 Sender# [2:236:21] SessionId# [4:112:2] Cookie# 7028213126997066828 2026-02-19T08:20:48.223301Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:48.223340Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 7028213126997066828 SessionId# [4:112:2] Binding# {2.3/70 ... 1 00h00m00.360610s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.294815Z 1 00h00m00.360610s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.294848Z 1 00h00m00.360610s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.294874Z 1 00h00m00.360610s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.294905Z 1 00h00m00.360610s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.294934Z 1 00h00m00.360610s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.294969Z 1 00h00m00.360610s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295010Z 1 00h00m00.360610s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295056Z 1 00h00m00.360610s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295085Z 1 00h00m00.360610s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295113Z 1 00h00m00.360610s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295382Z 1 00h00m00.779228s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.295475Z 1 00h00m00.779228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.295508Z 1 00h00m00.779228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.295534Z 1 00h00m00.779228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.295562Z 1 00h00m00.779228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.295599Z 1 00h00m00.779228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.295648Z 1 00h00m00.779228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 
ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295681Z 1 00h00m00.779228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295729Z 1 00h00m00.779228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295763Z 1 00h00m00.779228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.295790Z 1 00h00m00.779228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.296833Z 1 00h00m01.649953s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.296913Z 1 00h00m01.649953s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.296974Z 1 00h00m01.649953s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.297009Z 1 00h00m01.649953s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.297036Z 1 00h00m01.649953s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.297063Z 1 00h00m01.649953s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.297099Z 1 00h00m01.649953s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.297132Z 1 00h00m01.649953s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.297162Z 1 00h00m01.649953s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.297189Z 1 00h00m01.649953s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.297217Z 1 00h00m01.649953s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.298960Z 1 00h00m03.513304s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.299040Z 1 00h00m03.513304s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.299135Z 1 00h00m03.513304s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.299167Z 1 00h00m03.513304s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.299200Z 1 00h00m03.513304s :STATESTORAGE DEBUG: Replica::Handle ev: 
{EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.299249Z 1 00h00m03.513304s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.299292Z 1 00h00m03.513304s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.299333Z 1 00h00m03.513304s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.299361Z 1 00h00m03.513304s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.299388Z 1 00h00m03.513304s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.299415Z 1 00h00m03.513304s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.302970Z 1 00h00m07.761744s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.303055Z 1 00h00m07.761744s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.303098Z 1 00h00m07.761744s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.303128Z 1 00h00m07.761744s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.303169Z 1 00h00m07.761744s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.303196Z 1 00h00m07.761744s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.303291Z 1 00h00m07.761744s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.303343Z 1 00h00m07.761744s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.303383Z 1 00h00m07.761744s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.303411Z 1 00h00m07.761744s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.303446Z 1 00h00m07.761744s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.306471Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-02-19T08:20:48.306582Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 
2026-02-19T08:20:48.306644Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.306674Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:48.306723Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:48.306775Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:48.306839Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.306893Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.306958Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.306995Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig undelivered ringGroup:0 for: 3 2026-02-19T08:20:48.307068Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2026-02-19T08:20:08.546398Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488216953259012:2265];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:08.547333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:08.929252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:08.997283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:08.997423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:09.129897Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488216953258775:2081] 1771489208500222 != 1771489208500225 2026-02-19T08:20:09.162334Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:09.165865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:09.183741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:09.469451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:09.469472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:09.469487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:09.469576Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:09.477583Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:09.587900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:09.791843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:09.888658Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-02-19T08:20:09.888695Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-02-19T08:20:09.889166Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:875: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****N0Pg (FD66584C)) 2026-02-19T08:20:12.482159Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket eyJh****N0Pg (FD66584C) () has now permanent error message 'Login state is not available' 2026-02-19T08:20:12.482261Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2360: Finish waiting for login providers for 1 databases: /Root/Db1, 2026-02-19T08:20:13.858383Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:13.903197Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:13.985866Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:14.050260Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:14.054044Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488238724388498:2081] 1771489213753076 != 1771489213753079 2026-02-19T08:20:14.075511Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:14.075587Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:14.115446Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:14.209942Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:14.442284Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:14.442311Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:14.442317Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:14.442387Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:14.716739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:14.727448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:14.857854Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:14.890974Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:20:14.891187Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2026-02-19T08:20:14.891197Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:20:14.891298Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2026-02-19T08:20:14.891308Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:827: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2026-02-19T08:20:14.891330Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket **** (5DAB89DE): Token is not in correct format 2026-02-19T08:20:18.861928Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:18.862111Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:18.865305Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:18.865383Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-02-19T08:20:18.869562Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608488257760718207:2081] 1771489218678120 != 1771489218678123 2026-02-19T08:20:18.886212Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:18.898320Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:18.977533Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:18.977552Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:18.977557Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:18.977619Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:19.062953Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:19.219878Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:19.230310Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:19.239764Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:20:19.239844Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c75d4c7d5d0] Connect to grpc://localhost:21552 2026-02-19T08:20:19.244730Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c75d4c7d5d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:20:19.261917Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c75d4c7d5d0] Status 14 Service Unavailable 2026-02-19T08:20:19.262472Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:20:19.262510Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:19.262605Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:20:19.262884Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c75d4c7d5d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { ... 
e 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1069: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-02-19T08:20:30.546387Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of login1@passport 2026-02-19T08:20:30.673414Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:36.063707Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:36.070017Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:36.286336Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:36.318067Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:36.379052Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:36.379156Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:36.394633Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:36.574520Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:36.574552Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:36.574560Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:36.574646Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:36.861685Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:36.948129Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:36.962609Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:36.970133Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2026-02-19T08:20:36.970194Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c75d4d49e50] Connect to grpc://localhost:29639 2026-02-19T08:20:36.971345Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c75d4d49e50] Request 
BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2026-02-19T08:20:36.979350Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:37.007993Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c75d4d49e50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2026-02-19T08:20:37.008303Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1397: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2026-02-19T08:20:37.008325Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1397: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2026-02-19T08:20:37.008344Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1397: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2026-02-19T08:20:37.008360Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1397: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2026-02-19T08:20:37.008380Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1069: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-02-19T08:20:37.008565Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c75d4d4a1d0] Connect to grpc://localhost:13428 2026-02-19T08:20:37.009664Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c75d4d4a1d0] Request GetUserAccountRequest { user_account_id: "user1" } 2026-02-19T08:20:37.030566Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c75d4d4a1d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-02-19T08:20:37.031024Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of login1@passport 2026-02-19T08:20:41.817819Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:41.817985Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:41.994864Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:42.002136Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[7:7608488357329149035:2081] 1771489241677421 != 1771489241677424 2026-02-19T08:20:42.018511Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:42.018607Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:42.048275Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:42.225623Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:42.243396Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:42.243426Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:42.243450Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:42.243567Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:42.668903Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:42.683163Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:42.685371Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-02-19T08:20:42.685426Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c75d4c5c550] Connect to grpc://localhost:18544 2026-02-19T08:20:42.686455Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c75d4c5c550] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-02-19T08:20:42.707568Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c75d4c5c550] Status 14 Service Unavailable 2026-02-19T08:20:42.708183Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:20:42.708213Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:20:42.708245Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:42.708348Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 
2026-02-19T08:20:42.708666Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c75d4c5c550] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-02-19T08:20:42.722515Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c75d4c5c550] Status 14 Service Unavailable 2026-02-19T08:20:42.724006Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:20:42.724031Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:20:42.724060Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:42.737869Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] Test command err: RandomSeed# 3231492253731583668 2026-02-19T08:20:48.153216Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153324Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153361Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153400Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153433Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153478Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153527Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153559Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153591Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.153696Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.157521Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639257 Sender# [1:349:57] SessionId# [0:0:0] Cookie# 0 2026-02-19T08:20:48.157910Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.158004Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:244:52] SessionId# [8:149:1] Cookie# 
16424230221066836484 2026-02-19T08:20:48.158056Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [8:149:1] Inserted# false Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:48.165516Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 16424230221066836484 SessionId# [8:149:1] Binding# {3.3/11127675301752287418@[8:101:3]} Record# {CacheUpdate { } } RootNodeId# 3 StorageConfigGeneration# 0 KnownNode# true 2026-02-19T08:20:48.165633Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [8:149:1] Inserted# false Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:48.165752Z 8 00h00m00.002048s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007743s 2026-02-19T08:20:48.165823Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:244:52] SessionId# [2:131:1] Cookie# 16948611747734426116 2026-02-19T08:20:48.165871Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:48.165991Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 16948611747734426116 SessionId# [2:131:1] Binding# {1.3/16948611747734426116@[2:131:1]} Record# {RootNodeId: 3 CacheUpdate { } } 2026-02-19T08:20:48.166061Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:244:52] SessionId# [4:137:1] Cookie# 15602052765397368246 2026-02-19T08:20:48.166123Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [4:137:1] Inserted# false Subscription# {SessionId# [4:137:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:48.166181Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 15602052765397368246 SessionId# [4:137:1] Binding# {1.3/15602052765397368246@[4:137:1]} Record# {RootNodeId: 3 CacheUpdate { } } 2026-02-19T08:20:48.166257Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.166359Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:20:48.166562Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-02-19T08:20:48.166626Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.166658Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.166688Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.166742Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.166788Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72057594037936131 Cookie: 0} 2026-02-19T08:20:48.166831Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.166861Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.166895Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.166925Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.167017Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167072Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167108Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167137Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167164Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167246Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167296Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167347Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167376Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167418Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.167498Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [8:293:21] SessionId# [3:100:7] Cookie# 11127675301752287418 2026-02-19T08:20:48.167539Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 8 SessionId# [3:100:7] Inserted# false Subscription# {SessionId# [3:100:7] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-02-19T08:20:48.167612Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 8 Cookie# 11127675301752287418 SessionId# [3:100:7] Binding# Record# {CacheUpdate { } } RootNodeId# 3 StorageConfigGeneration# 0 KnownNode# true 2026-02-19T08:20:48.167652Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 8 SessionId# 
[3:100:7] Inserted# false Subscription# {SessionId# [3:100:7] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-02-19T08:20:48.167711Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [8:293:21] SessionId# [1:148:7] Cookie# 16424230221066836484 2026-02-19T08:20:48.167742Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 8 SessionId# [1:148:7] Inserted# false Subscription# {SessionId# [1:148:7] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:48.167819Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 8 Cookie# 16424230221066836484 SessionId# [1:148:7] Binding# {8.3/16424230221066836484@[1:148:7]} Record# {RootNodeId: 3 CacheUpdate { } } 2026-02-19T08:20:48.167888Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:251:21] SessionId# [7:121:2] Cookie# 8201421955371964 2026-02-19T08:20:48.167928Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [7:121:2] Inserted# false Subscription# {SessionId# [7:121:2] SubscriptionCookie# 0} NextSubscribeCookie# 5 2026-02-19T08:20:48.167988Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 8201421955371964 SessionId# [7:121:2] Binding# {2.3/8201421955371964@[7:121:2]} Record# {RootNodeId: 3 CacheUpdate { } } 2026-02-19T08:20:48.168040Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:265:21] SessionId# [9:83:4] Cookie# 17854523203120152298 2026-02-19T08:20:48.168085Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [9:83:4] Inserted# false Subscription# {SessionId# [9:83:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2026-02-19T08:20:48.168165Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 17854523203120152298 SessionId# [9:83:4] Binding# {4.3/17854523203120152298@[9:83:4]} Record# {RootNodeId: 3 CacheUpdate { } } 2026-02-19T08:20:48.168284Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2026-02-19T08:20:48.168380Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false Loc ... 
E DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-02-19T08:20:48.247084Z 1 00h00m08.595339s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-02-19T08:20:48.247131Z 1 00h00m08.595339s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2026-02-19T08:20:48.247166Z 1 00h00m08.595339s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2026-02-19T08:20:48.247219Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247266Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247312Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247347Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247397Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247472Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247523Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247558Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247595Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247622Z 1 00h00m08.595339s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-02-19T08:20:48.247732Z 1 00h00m08.595339s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2026-02-19T08:20:48.247811Z 1 00h00m08.595339s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}, {[1:3298559226995:0] : 157}}}} 
2026-02-19T08:20:48.247893Z 1 00h00m08.595339s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:9895628993651:0] : 163}, {[1:7696605738099:0] : 161}, {[1:8796117365875:0] : 162}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2026-02-19T08:20:48.247968Z 1 00h00m08.595339s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037936131 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2026-02-19T08:20:48.250539Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-02-19T08:20:48.250679Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-02-19T08:20:48.250753Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2026-02-19T08:20:48.250835Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:48.250894Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.250925Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:48.250980Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:48.251017Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:48.251048Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-02-19T08:20:48.251087Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-02-19T08:20:48.251116Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-02-19T08:20:48.251145Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2026-02-19T08:20:48.251177Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2026-02-19T08:20:48.251265Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251336Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 
2026-02-19T08:20:48.251378Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251440Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251518Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251588Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251628Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251674Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251736Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251781Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2026-02-19T08:20:48.251869Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2026-02-19T08:20:48.252005Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2026-02-19T08:20:48.252098Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, 
{[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2026-02-19T08:20:48.252229Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:7696605738099:0] : 161}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2026-02-19T08:20:48.252325Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 8 Signature: {{[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2026-02-19T08:20:48.252410Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] Test command err: RandomSeed# 7282851111626812713 2026-02-19T08:20:48.321345Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [7:271:21] SessionId# [1:145:6] Cookie# 588324247196326425 2026-02-19T08:20:48.321431Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [1:145:6] Inserted# false Subscription# {SessionId# [1:145:6] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:48.329091Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 7 Cookie# 588324247196326425 SessionId# [1:145:6] Binding# {7.5/588324247196326425@[1:145:6]} Record# {RootNodeId: 1 } 2026-02-19T08:20:48.329212Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {7.5/588324247196326425@[1:145:6]} Reason# binding cycle 2026-02-19T08:20:48.329282Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {7.5/588324247196326425@[1:145:6]} 2026-02-19T08:20:48.329374Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 7 Subscription# {SessionId# [1:145:6] SubscriptionCookie# 0} 2026-02-19T08:20:48.329437Z 1 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008145s 2026-02-19T08:20:48.329535Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:229:37] SessionId# [7:146:1] Cookie# 588324247196326425 2026-02-19T08:20:48.329598Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:48.331327Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 588324247196326425 
SessionId# [7:146:1] Binding# {5.1/8928551085726841552@[7:58:5]} Record# {BoundNodes { NodeId { Host: "127.0.0.7" Port: 19001 NodeId: 7 } Meta { Fingerprint: "\3403\207\365\032> Record# {BoundNodes { NodeId { Host: "127.0.0.8" Port: 19001 NodeId: 8 } Meta { Fingerprint: "\3403\207\365\032>> YdbTableSplit::RenameTablesAndSplit [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] Test command err: RandomSeed# 6646668613597782064 2026-02-19T08:20:48.966791Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:148:7] SessionId# [0:0:0] Cookie# 7 2026-02-19T08:20:48.966875Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 8 SessionId# [1:148:7] Cookie# 7 CookieInFlight# true SubscriptionExists# true 2026-02-19T08:20:48.966945Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {8.0/14102378488344780979@[0:0:0]} 2026-02-19T08:20:48.967170Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:244:52] SessionId# [6:143:1] Cookie# 14102378488344780978 2026-02-19T08:20:48.967222Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:48.967277Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 14102378488344780978 SessionId# [6:143:1] Binding# {5.1/14442747030887114988@[6:55:5]} 2026-02-19T08:20:48.967323Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2026-02-19T08:20:48.967363Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2026-02-19T08:20:48.967415Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2026-02-19T08:20:48.967447Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2026-02-19T08:20:48.967495Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2026-02-19T08:20:48.967542Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2026-02-19T08:20:48.967591Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2026-02-19T08:20:48.967648Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2026-02-19T08:20:48.967703Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} 2026-02-19T08:20:48.967780Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:244:52] SessionId# [2:131:1] Cookie# 10801174770944423240 2026-02-19T08:20:48.967817Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# 
[2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2026-02-19T08:20:48.975135Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 10801174770944423240 SessionId# [2:131:1] Binding# {1.6/10801174770944423240@[2:131:1]} Record# {RootNodeId: 1 } 2026-02-19T08:20:48.975252Z 2 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007465s 2026-02-19T08:20:48.975317Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:244:52] SessionId# [4:137:1] Cookie# 78203134710355590 2026-02-19T08:20:48.975367Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [4:137:1] Inserted# false Subscription# {SessionId# [4:137:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2026-02-19T08:20:48.975440Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 78203134710355590 SessionId# [4:137:1] Binding# {1.6/78203134710355590@[4:137:1]} Record# {RootNodeId: 1 } 2026-02-19T08:20:48.975585Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:251:21] SessionId# [1:130:1] Cookie# 10801174770944423240 2026-02-19T08:20:48.975649Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2026-02-19T08:20:48.975922Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 10801174770944423240 SessionId# [1:130:1] Binding# {8.0/14102378488344780979@[1:148:7]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 15366, msgbus: 25467 2026-02-19T08:14:26.713977Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608486747867543904:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:14:26.717200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:14:26.967997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:14:26.995957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:14:26.996044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:14:27.036393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:14:27.068540Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:14:27.186067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:14:27.186097Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:14:27.186111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:14:27.186223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:14:27.266845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:14:27.374643Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608486747867544108:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:14:27.374698Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608486752162512179:2729] HANDLE EvNavigateScheme dc-1 2026-02-19T08:14:27.375025Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608486752162512179:2729] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:27.421805Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608486752162512179:2729] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-02-19T08:14:27.428018Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608486752162512179:2729] Handle TEvDescribeSchemeResult Forward to# [1:7608486752162512178:2728] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:14:27.432178Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486747867544108:2143] Handle TEvProposeTransaction 2026-02-19T08:14:27.432207Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608486747867544108:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:14:27.432272Z node 1 
:TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486747867544108:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608486752162512184:2734] 2026-02-19T08:14:27.527406Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486752162512184:2734] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:27.527506Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486752162512184:2734] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:27.527526Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486752162512184:2734] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:27.527599Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486752162512184:2734] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:27.527887Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486752162512184:2734] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:27.527990Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486752162512184:2734] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-02-19T08:14:27.528072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486752162512184:2734] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:14:27.528205Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486752162512184:2734] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:14:27.528855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:14:27.532720Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486752162512184:2734] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:14:27.532777Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486752162512184:2734] txid# 281474976710657 SEND to# [1:7608486752162512183:2733] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:14:27.548069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:14:27.550594Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486747867544108:2143] Handle TEvProposeTransaction 2026-02-19T08:14:27.550616Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# 
[1:7608486747867544108:2143] TxId# 281474976710658 ProcessProposeTransaction 2026-02-19T08:14:27.550664Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608486747867544108:2143] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7608486752162512223:2770] 2026-02-19T08:14:27.552495Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608486752162512223:2770] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-02-19T08:14:27.552563Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:7608486752162512223:2770] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:14:27.552605Z node 1 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [1:7608486752162512223:2770] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-02-19T08:14:27.552665Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:7608486752162512223:2770] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:14:27.552942Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:7608486752162512223:2770] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:14:27.553042Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:7608486752162512223:2770] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:14:27.553096Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7608486752162512223:2770] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-02-19T08:14:27.553234Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:7608486752162512223:2770] txid# 281474976710658 HANDLE EvClientConnected 2026-02-19T08:14:27.553819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:14:27.556761Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:7608486752162512223:2770] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-02-19T08:14:27.556806Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:7608486752162512223:2770] txid# 281474976710658 SEND to# [1:7608486752162512222:2769] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-02-19T08:14:27.602508Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608486747867544108:2143] Handle TEvProposeTransaction 2026-02-19T08:14:27.602538Z node 1 :TX_PROXY DE ... 
ZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51792" 2026-02-19T08:20:39.528578Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488348597996557:3043] txid# 281474976710665 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:20:39.528600Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488348597996557:3043] txid# 281474976710665 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:20:39.528755Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488348597996557:3043] txid# 281474976710665 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:20:39.528811Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488348597996557:3043] txid# 281474976710665 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:20:39.528862Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488348597996557:3043] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:39.529136Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488348597996557:3043] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:39.529163Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488348597996557:3043] txid# 281474976710665, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:20:39.529258Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488348597996557:3043] txid# 281474976710665, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:20:39.529287Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488348597996557:3043] txid# 281474976710665 SEND to# [59:7608488348597996556:2355] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:39.529742Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=NmFlMjQyZGQtOTIxODMxYmYtNmZjYzI4My01MmM2ZGM5YQ==, ActorId: [59:7608488340008061907:2355], ActorState: ExecuteState, LegacyTraceId: 01khtfs8ekf4x948pbc9ktmhrw, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:20:39.531831Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488314238256788:2136] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:39.531860Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488314238256788:2136] TxId# 281474976710666 ProcessProposeKqpTransaction 2026-02-19T08:20:41.961212Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488314238256788:2136] Handle TEvProposeTransaction 2026-02-19T08:20:41.961243Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488314238256788:2136] TxId# 281474976710667 ProcessProposeTransaction 2026-02-19T08:20:41.961290Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488314238256788:2136] Cookie# 0 userReqId# "" txid# 281474976710667 SEND to# [59:7608488357187931205:3070] 2026-02-19T08:20:41.963498Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488357187931205:3070] txid# 281474976710667 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjQzNywiaWF0IjoxNzcxNDg5MjM3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.AkFdUQ0ds3N1wAnmAkhf0S--UB-6A5rgFJFpZaocKod4CeiiGY4IkvId3-LKm1ThLYkLZuHX5xqhmeHENzIen99DbxJ_xckOIAIvICrlxkd7pnYKNM4OxliOu7c9yClE5O7HvmXY8swk8KKJsaukReWDYwlA35jU8gCxSzXT42NFEp9LQPpziXZBgoNbq0sKmmqdiztDBpO_tiV4xlubSTOmUfYLevBb16zlBwGD_4Bt5YgZCNHhnKiEgGZcVUkon2z8k2CT1Wi5jO601G0dswLRWOYMs51U0UuZvKwO-nylgc6roN1XYblWIBi9zXWyFxDogb25SYVcF21HK5FReg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjQzNywiaWF0IjoxNzcxNDg5MjM3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51792" 2026-02-19T08:20:41.963577Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488357187931205:3070] txid# 281474976710667 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:20:41.963601Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488357187931205:3070] txid# 281474976710667 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:20:41.963757Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488357187931205:3070] txid# 281474976710667 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:20:41.963816Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488357187931205:3070] txid# 281474976710667 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:20:41.963868Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488357187931205:3070] txid# 281474976710667 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:41.964143Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488357187931205:3070] txid# 281474976710667 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:41.964168Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488357187931205:3070] txid# 281474976710667, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:20:41.964268Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# 
[59:7608488357187931205:3070] txid# 281474976710667, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:20:41.964296Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488357187931205:3070] txid# 281474976710667 SEND to# [59:7608488357187931204:2355] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:41.965327Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=NmFlMjQyZGQtOTIxODMxYmYtNmZjYzI4My01MmM2ZGM5YQ==, ActorId: [59:7608488340008061907:2355], ActorState: ExecuteState, LegacyTraceId: 01khtfsated0ta5m7vbm686mew, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-02-19T08:20:41.967536Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488314238256788:2136] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:41.967566Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488314238256788:2136] TxId# 281474976710668 ProcessProposeKqpTransaction 2026-02-19T08:20:46.401571Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:20:46.401613Z node 59 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:46.678845Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7608488314238256788:2136] Handle TEvProposeTransaction 2026-02-19T08:20:46.678896Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7608488314238256788:2136] TxId# 281474976710669 ProcessProposeTransaction 2026-02-19T08:20:46.678965Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7608488314238256788:2136] Cookie# 0 userReqId# "" txid# 281474976710669 SEND to# [59:7608488378662767771:3107] 2026-02-19T08:20:46.681548Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [59:7608488378662767771:3107] txid# 281474976710669 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjQzNywiaWF0IjoxNzcxNDg5MjM3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.AkFdUQ0ds3N1wAnmAkhf0S--UB-6A5rgFJFpZaocKod4CeiiGY4IkvId3-LKm1ThLYkLZuHX5xqhmeHENzIen99DbxJ_xckOIAIvICrlxkd7pnYKNM4OxliOu7c9yClE5O7HvmXY8swk8KKJsaukReWDYwlA35jU8gCxSzXT42NFEp9LQPpziXZBgoNbq0sKmmqdiztDBpO_tiV4xlubSTOmUfYLevBb16zlBwGD_4Bt5YgZCNHhnKiEgGZcVUkon2z8k2CT1Wi5jO601G0dswLRWOYMs51U0UuZvKwO-nylgc6roN1XYblWIBi9zXWyFxDogb25SYVcF21HK5FReg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc3MTUzMjQzNywiaWF0IjoxNzcxNDg5MjM3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51792" 2026-02-19T08:20:46.681632Z node 59 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [59:7608488378662767771:3107] txid# 281474976710669 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-02-19T08:20:46.681655Z node 59 :TX_PROXY DEBUG: schemereq.cpp:659: Actor# [59:7608488378662767771:3107] txid# 281474976710669 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-02-19T08:20:46.681879Z node 59 
:TX_PROXY DEBUG: schemereq.cpp:1592: Actor# [59:7608488378662767771:3107] txid# 281474976710669 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-02-19T08:20:46.681939Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1627: Actor# [59:7608488378662767771:3107] txid# 281474976710669 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-02-19T08:20:46.681994Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [59:7608488378662767771:3107] txid# 281474976710669 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:20:46.682320Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [59:7608488378662767771:3107] txid# 281474976710669 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:20:46.682349Z node 59 :TX_PROXY ERROR: schemereq.cpp:1245: Actor# [59:7608488378662767771:3107] txid# 281474976710669, Access denied for ordinaryuser, attempt to manage user 2026-02-19T08:20:46.682455Z node 59 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [59:7608488378662767771:3107] txid# 281474976710669, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-02-19T08:20:46.682487Z node 59 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [59:7608488378662767771:3107] txid# 281474976710669 SEND to# [59:7608488378662767770:2355] Source {TEvProposeTransactionStatus Status# 5} 2026-02-19T08:20:46.683009Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=59&id=NmFlMjQyZGQtOTIxODMxYmYtNmZjYzI4My01MmM2ZGM5YQ==, ActorId: [59:7608488340008061907:2355], ActorState: ExecuteState, LegacyTraceId: 01khtfsfdjdhvtn9cszzf9s6t1, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." 
severity: 1 } } } trace_id# 2026-02-19T08:20:46.683628Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7608488314238256788:2136] Handle TEvExecuteKqpTransaction 2026-02-19T08:20:46.683657Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7608488314238256788:2136] TxId# 281474976710670 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2026-02-19T08:19:19.947074Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488003938077271:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:19.947730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:20.205993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:20.361223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:20.361376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:20.396280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:20.511208Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:20.521803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:20.683800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:20.683839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:20.683851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:20.683935Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:20.959103Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:20.983403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:23.515621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488021117947743:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.515751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.516224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488021117947753:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.516278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:23.930794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /Root/Dir, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2026-02-19T08:19:23.931027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:23.931097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Dir/Foo, opId: 281474976715658:1, at schemeshard: 72057594046644480 2026-02-19T08:19:23.932233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:23.932264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:23.935345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Dir/Foo 2026-02-19T08:19:24.088213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489164130, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:19:24.204186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715658:0 2026-02-19T08:19:24.204269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715658:1 2026-02-19T08:19:24.236293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488025412915292:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.236406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.236935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488025412915295:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.236995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:24.268829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /Root/Dir/Foo, pathId: , opId: 281474976715659:0, at schemeshard: 72057594046644480 2026-02-19T08:19:24.269332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:24.269367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:19:24.275778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Dir/Foo 2026-02-19T08:19:24.289286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489164333, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:19:24.303427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715659:0 Fast forward 1m 2026-02-19T08:19:24.933096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488003938077271:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:24.933157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2026-02-19T08:19:34.212744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976710693:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2026-02-19T08:19:34.213392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976710693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" TabletID: 72075186224037888 ShardIdx: 1 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2026-02-19T08:19:34.213425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:34.309268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts 
is done, operation id: 281474976710693:0 2026-02-19T08:19:34.322939Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:19:34.322971Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-02-19T08:19:35.308682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:19:35.308709Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded partitions 1 2026-02-19T08:19:36.396563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_move_table.cpp:794: TMoveTable Propose, from: /Root/Dir/Foo, to: /Root/Dir/Bar, opId: 281474976715660:0, at schemeshard: 72057594046644480 2026-02-19T08:19:36.396845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:19:36.402706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Dir/Foo, dst path: /Root/Dir/Bar 2026-02-19T08:19:36.430643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOp ... 037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:20:48.537660Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2026-02-19T08:20:48.537799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7608488386190172206:2813], serverId# [1:7608488386190172218:5027], sessionId# [0:0:0] 2026-02-19T08:20:48.537875Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7608488386190172208:2815], serverId# [1:7608488386190172219:5028], sessionId# [0:0:0] 2026-02-19T08:20:48.538025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608488068362588668 RawX2: 4503603922340190 } TabletId: 72075186224037890 State: 4 2026-02-19T08:20:48.538082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:48.538315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608488068362588668 RawX2: 4503603922340190 } TabletId: 72075186224037890 State: 4 2026-02-19T08:20:48.538351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:48.540561Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:20:48.540596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 
72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:20:48.540636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:20:48.540661Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-02-19T08:20:48.540702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:20:48.540706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-02-19T08:20:48.540729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:20:48.540910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608488373305269873 RawX2: 4503603922340566 } TabletId: 72075186224037892 State: 4 2026-02-19T08:20:48.540963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:48.542161Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:20:48.542231Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:20:48.543279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608488373305269872 RawX2: 4503603922340565 } TabletId: 72075186224037891 State: 4 2026-02-19T08:20:48.543321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:48.543500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608488373305269872 RawX2: 4503603922340565 } TabletId: 72075186224037891 State: 4 2026-02-19T08:20:48.543521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:48.544241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:20:48.544258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:20:48.544259Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-02-19T08:20:48.544362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:20:48.544583Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-02-19T08:20:48.544776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:20:48.545534Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-02-19T08:20:48.546728Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-02-19T08:20:48.546818Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 2026-02-19T08:20:48.547263Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-02-19T08:20:48.550861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:20:48.550871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:20:48.550902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:20:48.551008Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:20:48.551024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:20:48.551037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:20:48.551098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:20:48.551128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:20:48.551157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:20:48.551260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-02-19T08:20:48.551301Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-02-19T08:20:48.551559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-02-19T08:20:48.552323Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-02-19T08:20:48.552421Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037892 2026-02-19T08:20:48.552777Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-02-19T08:20:48.554980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted 
shardIdx 72057594046644480:5 2026-02-19T08:20:48.555017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-02-19T08:20:48.555115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:20:48.555368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-02-19T08:20:48.555614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:20:48.555796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:20:48.555822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-02-19T08:20:48.555832Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-02-19T08:20:48.555885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:20:48.556607Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-02-19T08:20:48.556682Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037891 2026-02-19T08:20:48.556936Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-02-19T08:20:48.561920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:20:48.561966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:20:48.562023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:20:48.562054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false |92.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> TSchemeShardLoginTest::UserLogin1 |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TPQCDTest::TestDiscoverClusters >> TPQCDTest::TestUnavailableWithoutBoth >> BSCMovePDisk::PDiskMove_Block42 [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::BanUserWithWaiting >> TSchemeShardLoginFinalize::NoPublicKeys >> TPQCDTest::TestPrioritizeLocalDatacenter |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Block42 [GOOD] Test command err: RandomSeed# 2893953359858353809 >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] >> BSCStopPDisk::PDiskStop [GOOD] >> TSchemeShardLoginTest::UserLogin1 [GOOD] >> TSchemeShardLoginTest::UserLogin2 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true >> TSchemeShardLoginFinalize::NoPublicKeys [GOOD] >> TSchemeShardLoginFinalize::InvalidPassword >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::TestExternalLogin >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] Test command err: RandomSeed# 15028172968242261559 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] Test command err: RandomSeed# 11665306963508727105 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> 
TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> TSchemeShardLoginTest::UserLogin2 [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> TSchemeShardLoginFinalize::InvalidPassword [GOOD] >> TSchemeShardLoginFinalize::Success >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true >> KqpStats::SysViewClientLost [GOOD] >> KqpStats::SysViewCancelled |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] Test command err: RandomSeed# 15597113232199772654 |92.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] Test command err: 2026-02-19T08:19:53.071146Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488151815675063:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:53.072259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:53.529377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:53.581428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:53.581523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:53.691446Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:53.704321Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488151815675013:2081] 1771489193041425 != 1771489193041428 2026-02-19T08:19:53.737805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:53.741676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:54.117761Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-02-19T08:19:54.150652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:54.150674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:54.150680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:54.150774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:54.213990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:54.869623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:54.885577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:19:54.956581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.150367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.332452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.412502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:57.496499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488168995546088:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:57.496652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:57.497490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488168995546098:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:57.497577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:57.836203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:57.893823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.008295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.046150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488151815675063:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:58.046325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:58.064849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.144875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.224870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.309173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.433478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.641758Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488173290514265:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.641830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.641878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488173290514270:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.642339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488173290514273:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.642394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.645384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:58.671874Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488173290514272:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:19:58.749294Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488173290514325:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state ... lterResource ok# false data# peer# 2026-02-19T08:20:52.747933Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1cfdf80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2026-02-19T08:20:52.748062Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1cfd180] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2026-02-19T08:20:52.748146Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a45e80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2026-02-19T08:20:52.748269Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1cfb580] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2026-02-19T08:20:52.748350Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1914880] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2026-02-19T08:20:52.748465Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a49d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2026-02-19T08:20:52.748547Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a4a480] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2026-02-19T08:20:52.748655Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1914180] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2026-02-19T08:20:52.748751Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1915d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2026-02-19T08:20:52.748860Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a81680] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2026-02-19T08:20:52.748953Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a80180] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2026-02-19T08:20:52.749063Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1916480] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2026-02-19T08:20:52.749141Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a4d580] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2026-02-19T08:20:52.749258Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a4dc80] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2026-02-19T08:20:52.749355Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1939480] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2026-02-19T08:20:52.749468Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a47a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2026-02-19T08:20:52.749563Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c8a080] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# 
false data# peer# 2026-02-19T08:20:52.749664Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1938d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2026-02-19T08:20:52.749820Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1d77280] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2026-02-19T08:20:52.749871Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a0f380] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2026-02-19T08:20:52.750035Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1de6480] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2026-02-19T08:20:52.750090Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1dcf180] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2026-02-19T08:20:52.750260Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1b24980] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2026-02-19T08:20:52.750310Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c7f880] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2026-02-19T08:20:52.750484Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1b23480] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2026-02-19T08:20:52.750509Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c7f180] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2026-02-19T08:20:52.750696Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a19480] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2026-02-19T08:20:52.750708Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a18680] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2026-02-19T08:20:52.750898Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a1cc80] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2026-02-19T08:20:52.750918Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a1a280] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2026-02-19T08:20:52.751096Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a2f980] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2026-02-19T08:20:52.751131Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a30080] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2026-02-19T08:20:52.751296Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1988780] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2026-02-19T08:20:52.751338Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a7f380] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2026-02-19T08:20:52.751504Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1a99e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2026-02-19T08:20:52.751551Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1da9e80] received 
request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2026-02-19T08:20:52.751701Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1999f80] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2026-02-19T08:20:52.751756Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1daa580] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2026-02-19T08:20:52.751899Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1dbe780] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2026-02-19T08:20:52.751950Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c02680] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2026-02-19T08:20:52.752122Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c01f80] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2026-02-19T08:20:52.752150Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c01880] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2026-02-19T08:20:52.752321Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1aebb80] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2026-02-19T08:20:52.752362Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c01180] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2026-02-19T08:20:52.752518Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1eec580] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2026-02-19T08:20:52.752560Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1949e80] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2026-02-19T08:20:52.752609Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1921380] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2026-02-19T08:20:52.752762Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c00a80] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2026-02-19T08:20:52.752797Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c00380] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2026-02-19T08:20:52.752970Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1947b80] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2026-02-19T08:20:52.752998Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1da7480] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2026-02-19T08:20:52.753179Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1947480] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2026-02-19T08:20:52.753184Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1da8280] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2026-02-19T08:20:52.753379Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1da7b80] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2026-02-19T08:20:52.753395Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1dcc080] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2026-02-19T08:20:52.753572Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c19d2d80] received request Name# 
Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2026-02-19T08:20:52.753574Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c19d1180] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2026-02-19T08:20:52.753790Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1dc0a80] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2026-02-19T08:20:52.753982Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1924b80] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2026-02-19T08:20:52.754198Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c19ca180] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2026-02-19T08:20:52.754389Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c1c59e80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2026-02-19T08:20:52.754592Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d10c19c9380] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink |92.2%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> TS3WrapperTests::CompleteUnknownUpload >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2026-02-19T08:19:25.465375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488031293360269:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:25.465429Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:25.566251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:25.887766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:25.931876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:25.931991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:26.013983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:26.020287Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:26.081860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:26.438532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:26.438562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:26.438571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:26.438671Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:26.513938Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:26.827905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:29.715791Z node 
1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488048473230762:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.715920Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.716451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488048473230772:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.716539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:29.999914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:30.262159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488052768198227:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.262256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.262337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488052768198232:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.262567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488052768198234:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.262624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:30.265495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:30.288494Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488052768198236:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:19:30.453893Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488052768198318:3076] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:30.466012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488031293360269:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:30.466072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:40.847742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:19:40.847774Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:26.189323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976717661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-02-19T08:20:30.245136Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 2.2691 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037888 table: [/Root/Foo] 2026-02-19T08:20:35.244917Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 2.2691 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-02-19T08:20:40.250200Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 2.2691 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-02-19T08:20:45.255255Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 8.5528 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-02-19T08:20:45.353763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:611: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 149786, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 85528, cpuUsageThreshold: 50000) 2026-02-19T08:20:45.353798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:714: Requesting full tablet stats 72075186224037888 to split it 2026-02-19T08:20:45.355127Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2285: Started key access sampling at datashard: 72075186224037888 2026-02-19T08:20:45.356000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 149786 row count 2000 buckets 9 ready 1 2026-02-19T08:20:45.356071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram Process detailed partition statistics at tablet 72057594046644480 from datashard 72075186224037888 from follower ID 0 for pathId [OwnerId: 
72057594046644480, LocalPathId: 38], state Ready, data size buckets 9, key access buckets 0 2026-02-19T08:20:50.253553Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 8.5528 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-02-19T08:20:50.353766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:611: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 149786, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 85528, cpuUsageThreshold: 50000) 2026-02-19T08:20:50.353790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:714: Requesting full tablet stats 72075186224037888 to split it 2026-02-19T08:20:50.355392Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2287: Extended key access sampling at datashard: 72075186224037888 2026-02-19T08:20:50.355845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 149786 row count 2000 buckets 9 ready 1 2026-02-19T08:20:50.355908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram Process detailed partition statistics at tablet 72057594046644480 from datashard 72075186224037888 from follower ID 0 for pathId [OwnerId: 72057594046644480, LocalPathId: 38], state Ready, data size buckets 9, key access buckets 100 2026-02-19T08:20:50.355991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:490: TTxPartitionHistogram Want to Split by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 149786, minShardSize: 0, shardCount: 1, expectedSha ... 
HEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976710693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\264\345\340o\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000\264\345\340o\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000\264\345\340o\000\000\000\200" } SchemeshardId: 72057594046644480 2026-02-19T08:20:50.358096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:20:50.361945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976710693:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-02-19T08:20:50.371186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976710693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:20:50.371562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976710693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:20:50.371630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710693:0 CreateParts -> ConfigureParts 2026-02-19T08:20:50.378001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2026-02-19T08:20:50.386648Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7608488396365668870:10162] 2026-02-19T08:20:50.390555Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:7608488396365668869:10168] 2026-02-19T08:20:50.412747Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-02-19T08:20:50.412870Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-02-19T08:20:50.413057Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-02-19T08:20:50.414930Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:20:50.415046Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:20:50.415203Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:20:50.421181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710693 TabletId: 72075186224037890 
2026-02-19T08:20:50.424885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710693 TabletId: 72075186224037889 2026-02-19T08:20:50.424941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710693:0 ConfigureParts -> TransferData 2026-02-19T08:20:50.427081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:20:50.458194Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-02-19T08:20:50.458195Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2026-02-19T08:20:50.458352Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:20:50.458352Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:20:50.458413Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 2026-02-19T08:20:50.458414Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 2026-02-19T08:20:50.458452Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037890 2026-02-19T08:20:50.458455Z node 1 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037889 2026-02-19T08:20:50.458859Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-02-19T08:20:50.458860Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-02-19T08:20:50.464618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710693 TabletId: 72075186224037888 2026-02-19T08:20:50.465025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710693:0 TransferData -> NotifyPartitioningChanged 2026-02-19T08:20:50.467430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:20:50.467818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:20:50.467924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:20:50.471535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710693 2026-02-19T08:20:50.471616Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710693 2026-02-19T08:20:50.471642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-02-19T08:20:50.478756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-02-19T08:20:50.478864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-02-19T08:20:50.478903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-02-19T08:20:50.478957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-02-19T08:20:50.481276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 2026-02-19T08:20:50.562442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:20:50.562624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 14.999761s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-02-19T08:20:50.562991Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7608488031293360588:2205] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-02-19T08:20:50.564243Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7608488031293360588:2205] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-02-19T08:20:50.590315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 27, next wakeup# 14.972085s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-02-19T08:20:50.590752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 28, next wakeup# 14.971631s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:20:50.596769Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2026-02-19T08:20:50.597137Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2026-02-19T08:20:50.606058Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-02-19T08:20:50.612252Z 
node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:20:50.612309Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:20:50.615956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:50.616275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:20:50.628523Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:20:50.631252Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:20:50.631365Z node 1 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:20:50.632114Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Table has 2 shards >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::BanUnbanUser >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload >> TContinuousBackupWithRebootsTests::Basic [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2026-02-19T08:20:55.905324Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 1965BCBB-1617-4754-BFBB-25A5F72A4390, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:10742 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 9D2C7893-93E6-4D76-A362-54DEFD8D0919 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2026-02-19T08:20:55.910708Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 1965BCBB-1617-4754-BFBB-25A5F72A4390, response# >> TS3WrapperTests::PutObject >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] >> TS3WrapperTests::AbortUnknownUpload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: 2026-02-19T08:19:54.750572Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488154208501433:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:54.750647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:54.844238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:55.299129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:55.299249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:55.317788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:55.331265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:55.384743Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488154208501202:2081] 1771489194708555 != 1771489194708558 2026-02-19T08:19:55.395983Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:55.494146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:55.494174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:55.494182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:55.494271Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:55.534076Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:55.741260Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:56.090223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:56.106538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:56.160416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.342005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.549830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.629392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.869042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488171388372273:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.869145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.869638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488171388372283:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:58.869674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.281801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.313469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.341631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.410394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.451255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.490962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.528273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.580897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.677621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488175683340447:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.677763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.678058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488175683340452:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.678105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488175683340453:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.678177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.682280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:59.694718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:19:59.695265Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488175683340456:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:19:59.750689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488154208501433:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:59.750790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:59.775891Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488175683340507:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/ ... 897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:41.034173Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:41.035044Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:41.035103Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:41.067695Z node 6 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2026-02-19T08:20:41.067859Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:41.069670Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:41.088460Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:41.105838Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:41.227146Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:41.227175Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:41.227183Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:41.227283Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:41.690164Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:41.795283Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:42.026019Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:42.083398Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:42.239746Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:42.573791Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:42.823913Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:45.679233Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7608488351630702980:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:45.679306Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:46.143554Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608488377400509078:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:46.143692Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:46.146042Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608488377400509088:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:46.146139Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:46.260473Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:46.376435Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:46.483666Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:46.619482Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:46.746450Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:46.865160Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:46.979443Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:47.082996Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:47.259860Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608488381695477567:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:47.259985Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:47.260551Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608488381695477572:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:47.260620Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608488381695477573:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:47.260775Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:47.266426Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:47.304142Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7608488381695477576:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:20:47.396252Z node 6 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [6:7608488381695477654:4837] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } OneShardNonLocalExec: firstNodeId=6 nodeCount=2 drainNode: nodeId=6 undrain=0 drainNode: completed, status=0 movements=19 waitTablets: waiting for all tablets on nodeId=7 waitTablets: attempt 0, tablet leader nodes: {7}, expecting nodeId=7 drainNode: nodeId=6 undrain=1 drainNode: undrain completed drainNode: nodeId=7 undrain=0 drainNode: completed, status=0 movements=39 waitTablets: waiting for all tablets on nodeId=6 waitTablets: attempt 0, tablet leader nodes: {6}, expecting nodeId=6 >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> TS3WrapperTests::PutObject [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2026-02-19T08:20:10.010784Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488220786199335:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:10.026074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:10.785873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:10.858031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:10.858148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:11.039604Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:11.072903Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488220786199102:2081] 1771489209959042 != 1771489209959045 2026-02-19T08:20:11.096098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:11.100297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:11.101838Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:11.122245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:11.285913Z node 1 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-02-19T08:20:11.358430Z node 1 
:TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:20:11.358518Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-02-19T08:20:11.358547Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-02-19T08:20:11.358568Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-02-19T08:20:11.358581Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-02-19T08:20:11.358626Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c84e87f07d0] Connect to grpc://localhost:6827 2026-02-19T08:20:11.369891Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e87f07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:20:11.422464Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2385: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2026-02-19T08:20:11.490287Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c84e87f07d0] Status 16 Unauthenticated service 2026-02-19T08:20:11.493882Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-02-19T08:20:11.493931Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2026-02-19T08:20:11.493984Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:509: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-02-19T08:20:11.494075Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:20:11.494348Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e87f07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:20:11.502809Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c84e87f07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:20:11.503042Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:20:11.504918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:16.177641Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:16.180788Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2026-02-19T08:20:16.187198Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:16.205923Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488244713023726:2081] 1771489215831239 != 1771489215831242 2026-02-19T08:20:16.221035Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:16.221108Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:16.334044Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:16.350884Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-02-19T08:20:16.351084Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:20:16.351104Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-02-19T08:20:16.351114Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-02-19T08:20:16.351128Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-02-19T08:20:16.351138Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-02-19T08:20:16.351165Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c84e8823050] Connect to grpc://localhost:17903 2026-02-19T08:20:16.352183Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e8823050] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:20:16.494213Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2385: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2026-02-19T08:20:16.501813Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:16.510632Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c84e8823050] Status 16 Unauthenticated service 2026-02-19T08:20:16.511683Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-02-19T08:20:16.511719Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2026-02-19T08:20:16.511754Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:509: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-02-19T08:20:16.511799Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:20:16.511997Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e8823050] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" 
type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:20:16.514494Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c84e8823050] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:20:16.514936Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:20:16.880700Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:16.880740Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-02-19T08:20:17.886123Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-02-19T08:20:18.887276Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-02-19T08:20:18.887323Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:70: Refresh token for provider# token-for-access-service 2026-02-19T08:20:18.887405Z node 2 :TOKEN_MANAGER TRACE: vm_metadata_token_provider_handler.cpp:25: Handle send request to vm metaservice 2026-02-19T08:20:18.890034Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-02-19T08:20:18.890172Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:509: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-02-19T08:20:18.890224Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (9D42FAED) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:20:18.890247Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-02-19T08:20:18.890257Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-02-19T08:20:18.890264Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-02-19T08:20:18.890272Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-02-19T08:20:18.890453Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e8823050] Request BulkAuthorizeRequest { iam_token: "**** (9D42FAED)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:20:18.891076Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2385: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# new-****o ... 
n, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:34.043571Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:34.046279Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:20:34.046342Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c84e88439d0] Connect to grpc://localhost:7747 2026-02-19T08:20:34.047148Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e88439d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-02-19T08:20:34.064227Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c84e88439d0] Status 14 Service Unavailable 2026-02-19T08:20:34.064572Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:34.064601Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:20:34.064746Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e88439d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-02-19T08:20:34.066377Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c84e88439d0] Status 14 Service Unavailable 2026-02-19T08:20:34.066548Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:37.997892Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:37.998085Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:38.167565Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:38.168182Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608488342248896717:2081] 1771489237910680 != 1771489237910683 2026-02-19T08:20:38.179088Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:38.179201Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:38.208470Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:38.353191Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:38.405977Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:38.405998Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:38.406006Z node 6 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:38.406076Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:38.756464Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:38.762752Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:38.764421Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-02-19T08:20:38.764481Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c84e881b250] Connect to grpc://localhost:14607 2026-02-19T08:20:38.765739Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e881b250] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-02-19T08:20:38.780285Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c84e881b250] Status 14 Service Unavailable 2026-02-19T08:20:38.780805Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:38.780835Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-02-19T08:20:38.781023Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e881b250] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-02-19T08:20:38.786260Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c84e881b250] Status 14 Service Unavailable 2026-02-19T08:20:38.786572Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:38.926553Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:39.935741Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-02-19T08:20:39.935789Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-02-19T08:20:39.935969Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e881b250] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-02-19T08:20:39.943084Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c84e881b250] Status 14 Service Unavailable 2026-02-19T08:20:39.943375Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:40.937452Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-02-19T08:20:40.937503Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-02-19T08:20:40.937773Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e881b250] 
Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-02-19T08:20:40.950461Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c84e881b250] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:20:40.950782Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-02-19T08:20:51.701377Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488399400680948:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:51.702046Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:51.739876Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:51.841789Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:51.844972Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608488399400680915:2081] 1771489251698378 != 1771489251698381 2026-02-19T08:20:51.858043Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:51.858141Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:51.902061Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:51.958307Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:51.958337Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:51.958345Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:51.958417Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:52.009674Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:52.210596Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:52.218890Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:52.220787Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-02-19T08:20:52.220859Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c84e8898450] Connect to grpc://localhost:25322 
2026-02-19T08:20:52.221824Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e8898450] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-02-19T08:20:52.237165Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c84e8898450] Status 14 Service Unavailable 2026-02-19T08:20:52.237569Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:20:52.237609Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-02-19T08:20:52.237882Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c84e8898450] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-02-19T08:20:52.240175Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c84e8898450] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:20:52.240385Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-02-19T08:20:52.697061Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2026-02-19T08:20:52.110255Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488403304952740:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:52.110761Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:52.367904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:52.390880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:52.390988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:52.465534Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488403304952697:2081] 1771489252093471 != 1771489252093474 2026-02-19T08:20:52.476389Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.479456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:52.599303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:52.599331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:52.599337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:52.599429Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:52.624516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:53.118631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:55.052354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488416189855552:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.052495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488416189855527:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.052648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.057907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488416189855557:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.058041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.059791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:55.073279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488416189855556:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-02-19T08:20:55.205573Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488416189855615:2563] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:55.598608Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488416189855632:2382], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:20:55.599291Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MWY2YTJiOTQtYWM5YTViNTEtOWI0MjVkOTEtMjY3YWE4NDE=, ActorId: [1:7608488416189855515:2367], ActorState: ExecuteState, LegacyTraceId: 01khtfsqm9dab7d4c9x6nzj648, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:20:55.616548Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |92.2%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TS3WrapperTests::GetUnknownObject |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2026-02-19T08:20:56.724033Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# E1AB8635-D47B-4298-AD8E-28D7DA535C32, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:19209 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 89CB5695-0C61-4EFB-BB02-363E458EF8FC amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2026-02-19T08:20:56.730095Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# E1AB8635-D47B-4298-AD8E-28D7DA535C32, response# >> TS3WrapperTests::GetObject >> TS3WrapperTests::CopyPartUpload >> TS3WrapperTests::GetUnknownObject [GOOD] >> TSchemeShardLoginTest::BanUnbanUser [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:51.827400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:51.827556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:51.827657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:51.827713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:51.827769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:51.827851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:51.827908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:51.827963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:51.828046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:51.828145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:51.829154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:51.829596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:51.931976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:51.932047Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:51.948904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:51.949778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:51.950006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:51.974874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:51.986310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:51.987211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:51.987642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:51.991411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:51.991628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:51.992964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:51.993040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:51.993152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:51.993195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:51.993239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:51.993447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.153094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.154939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.155032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.155107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.155204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.155282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ne_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.576326Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 14] 2026-02-19T08:20:56.576353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 28] name: top_queries_by_cpu_time_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.576382Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 28] 2026-02-19T08:20:56.576414Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 37] name: top_queries_by_duration_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.576438Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2026-02-19T08:20:56.576463Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 25] name: top_queries_by_duration_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.576494Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 25] 2026-02-19T08:20:56.576548Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 15] name: top_queries_by_read_bytes_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.576570Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 15] 2026-02-19T08:20:56.576610Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 16] name: top_queries_by_read_bytes_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.576632Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-02-19T08:20:56.576655Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.576682Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-02-19T08:20:56.576718Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: 
EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.576738Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-02-19T08:20:56.576763Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:56.576788Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:20:56.577116Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-02-19T08:20:56.577230Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:20:56.577271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.577324Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:20:56.577359Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.577437Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:20:56.577493Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-02-19T08:20:56.577529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.577561Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-02-19T08:20:56.577604Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-02-19T08:20:56.577639Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-02-19T08:20:56.579670Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:56.579775Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2026-02-19T08:20:56.579932Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:56.579981Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:56.580139Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:56.580202Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-02-19T08:20:56.580664Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:20:56.580756Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:20:56.580797Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-02-19T08:20:56.580834Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:20:56.580875Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:20:56.580965Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-02-19T08:20:56.582484Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2026-02-19T08:20:56.583064Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:56.583227Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 193us result status StatusSuccess 2026-02-19T08:20:56.583696Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:51.923925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:51.924021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:51.924085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:51.924123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:51.924155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:51.924214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:51.924242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:51.924281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:51.924352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:51.924414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:51.925251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:51.925574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:52.028012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:52.028083Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.043560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:52.051158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:52.051413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:52.067925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:52.068812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:52.069635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:52.070049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:52.073854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.074038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:52.075369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:52.075432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.075578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:52.075625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:52.075679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:52.075901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.240590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.241640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.241801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.241883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.241948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.242707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
:20:56.702270Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:20:56.702313Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:20:56.702359Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:20:56.702408Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:20:56.702498Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:20:56.705328Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:20:56.705868Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:56.706301Z node 4 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [4:680:2668] Bootstrap 2026-02-19T08:20:56.707582Z node 4 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [4:680:2668] Become StateWork (SchemeCache [4:685:2673]) 2026-02-19T08:20:56.707918Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:56.708104Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 216us result status StatusSuccess 2026-02-19T08:20:56.708568Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:56.709115Z node 4 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [4:680:2668] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:20:56.711223Z node 4 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 2026-02-19T08:20:56.712081Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:56.712132Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:121: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-02-19T08:20:56.758657Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin 
Complete, with error: Cannot find user 'user1', at schemeshard: 72057594046678944 2026-02-19T08:20:56.758785Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:56.758831Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:56.759025Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:56.759071Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-02-19T08:20:56.759596Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2026-02-19T08:20:56.759953Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:56.760121Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 196us result status StatusSuccess 2026-02-19T08:20:56.760599Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxxNWa6ES9tJI3wLvSY9S\nBgRzgtMyBi6Ex5BY2RswsilPkMuYk3gl9pfys2KKjboiNjsN2MZ7Ih3xrY6SZqfA\nrMV7hM3yZXWeCopWpMZD+/IXmCeGcS4PF4Rcq3QTW2vPNO4LNTsO6UoizktDE9CP\nZ7ep7upu18E1QDiU3nMerG9v396MssQDqm1WV08RxqdLgkju0nRq70ehaNIP2vGy\n1uhJ0stFBJLJGc4ZdZ/8OX142q/PARxOThmEPPM7HfRPToK9sg+igj4MGYbDsx8h\nAHMmY9rYDBwfW0srqtUFMzzmhWKpngLb6kAelEm29cE0IJp7phXqW6yA1RbYVLtu\ngwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1771575656756 } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2026-02-19T08:20:56.913234Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 58D46CA5-C43E-416D-A697-E70AAFF182E1, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:7724 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 92489341-38B0-445D-9404-D4FEAB7E2BFA amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-02-19T08:20:56.918276Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 58D46CA5-C43E-416D-A697-E70AAFF182E1, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } >> TS3WrapperTests::HeadUnknownObject ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::Basic [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:20:20.390984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:20.391101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:20.391209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:20.391252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:20.391305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:20.391347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:20.391398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:20.391439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:20.391503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:20.391597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:20.392542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:20.392885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:20.678831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:20:20.678948Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:20.679966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:20.728362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:20.729304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:20.729510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:20.744552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:20.744889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:20.746253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:20.746538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:20.759562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:20.759780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:20.761180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:20.761246Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:20.761484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:20.761543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:20.761591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:20.765927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:21.164530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.165746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.165890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.165984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:21.166802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: ... _SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:20:56.303477Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 6 2026-02-19T08:20:56.303532Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:20:56.304434Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:20:56.304473Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:20:56.304509Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:20:56.304542Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:20:56.304621Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-02-19T08:20:56.305283Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:20:56.305320Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:20:56.305348Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-02-19T08:20:56.307170Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:20:56.310844Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:20:56.311591Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:20:56.311915Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:20:56.311971Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:20:56.312117Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-02-19T08:20:56.312170Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-02-19T08:20:56.312226Z node 16 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-02-19T08:20:56.312271Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-02-19T08:20:56.312316Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-02-19T08:20:56.312360Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-02-19T08:20:56.312412Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:20:56.312456Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:20:56.312605Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:20:56.312660Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-02-19T08:20:56.312689Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:1 2026-02-19T08:20:56.312724Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:20:56.312753Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-02-19T08:20:56.312777Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:2 2026-02-19T08:20:56.312843Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-02-19T08:20:56.313902Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:20:56.314057Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:20:56.314232Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:20:56.314337Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:20:56.314436Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:20:56.314496Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:20:56.314537Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:20:56.314986Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:20:56.315304Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:20:56.315517Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:20:56.315563Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:20:56.318430Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:20:56.318826Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:20:56.318880Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:20:56.319358Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:20:56.319471Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:20:56.319537Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [16:1167:3083] TestWaitNotification: OK eventTxId 104 2026-02-19T08:20:56.320188Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:20:56.320434Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 301us result status StatusPathDoesNotExist 2026-02-19T08:20:56.320614Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:20:56.321187Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: 
false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:20:56.321389Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 232us result status StatusPathDoesNotExist 2026-02-19T08:20:56.321548Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:51.998519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:51.998732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:51.998848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:51.998908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:51.998949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:51.999010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:51.999044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:51.999089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:51.999164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:51.999240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:52.000196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:52.000546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:52.098388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:52.098459Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.119853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:52.122446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:52.122667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:52.143684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:52.144304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:52.144994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:52.145333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:52.148347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.148504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:52.149690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:52.149766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.149908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:52.149966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:52.150014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:52.150196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.302321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.302987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:20:52.303373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.303652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ne_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.559800Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 14] 2026-02-19T08:20:56.559846Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 28] name: top_queries_by_cpu_time_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.559882Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 28] 2026-02-19T08:20:56.559914Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 37] name: top_queries_by_duration_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.559932Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2026-02-19T08:20:56.559955Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 25] name: top_queries_by_duration_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.559976Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 25] 2026-02-19T08:20:56.559999Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 15] name: top_queries_by_read_bytes_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.560020Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 15] 2026-02-19T08:20:56.560061Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 16] name: top_queries_by_read_bytes_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.560082Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-02-19T08:20:56.560108Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.560140Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-02-19T08:20:56.560170Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: 
EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-02-19T08:20:56.560189Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-02-19T08:20:56.560214Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:56.560235Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:20:56.560538Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-02-19T08:20:56.560644Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:20:56.560675Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.560710Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:20:56.560745Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.560789Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:20:56.560861Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-02-19T08:20:56.560913Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.560946Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-02-19T08:20:56.560977Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-02-19T08:20:56.561009Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-02-19T08:20:56.562853Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:56.562986Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2026-02-19T08:20:56.563153Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:56.563191Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:56.563345Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:56.563402Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-02-19T08:20:56.563885Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:20:56.563980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:20:56.564021Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-02-19T08:20:56.564061Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:20:56.564105Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:20:56.564189Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-02-19T08:20:56.565683Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2026-02-19T08:20:56.566203Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:56.566361Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 175us result status StatusSuccess 2026-02-19T08:20:56.566781Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TS3WrapperTests::GetObject [GOOD] >> TS3WrapperTests::CopyPartUpload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:52.156138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:52.156238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.156312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:52.156351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:52.156392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:52.156459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:52.156499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:52.156527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, 
BatchMaxTime 0.100000s 2026-02-19T08:20:52.156594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.156674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:52.157570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:52.157918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:52.261797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:52.261869Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.280218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:52.281004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:52.281231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:52.294114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:52.294833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:52.295673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:52.296033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:52.299920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.300137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:52.301386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:52.301450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.301558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:52.301615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:52.301668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:52.301919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.436090Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.437943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.438029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.438120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.438209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:56.861168Z node 4 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [4:680:2668] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:20:56.863270Z node 4 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResults wait txId: 101 2026-02-19T08:20:56.865802Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:20:56.871753Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-02-19T08:20:56.871915Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:20:56.871965Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:20:56.872027Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:20:56.872079Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:20:56.872150Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-02-19T08:20:56.872231Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:20:56.872281Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:20:56.872329Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:20:56.872373Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2026-02-19T08:20:56.872415Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-02-19T08:20:56.875258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:56.875386Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2026-02-19T08:20:56.875635Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:56.875714Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:56.875910Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:56.875963Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:20:56.876557Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:20:56.876698Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:20:56.876756Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:20:56.876806Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-02-19T08:20:56.876857Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:20:56.876961Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:20:56.878894Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2026-02-19T08:20:56.879286Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:56.879338Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:121: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-02-19T08:20:57.027301Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.031153Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:20:57.031355Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:57.031405Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:57.031923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-02-19T08:20:57.031987Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:57.032040Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-02-19T08:20:57.032666Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-02-19T08:20:57.033010Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.033117Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:20:57.038220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.040322Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-02-19T08:20:57.040686Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.040804Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:20:57.046146Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.048309Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 
72057594046678944 2026-02-19T08:20:57.048717Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.048835Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:20:57.054570Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.056848Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-02-19T08:20:57.057348Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.057467Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-02-19T08:20:57.057910Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.058016Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 |92.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:52.234704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:52.234798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.234891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:52.234933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:52.234996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:52.235061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:52.235096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:52.235133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for 
table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:52.235191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.235269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:52.236151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:52.236488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:52.328884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:52.328943Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.351948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:52.361647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:52.361865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:52.375501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:52.376165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:52.376960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:52.377346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:52.380980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.381161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:52.382421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:52.382479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.382586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:52.382630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:52.382669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:52.382821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.525365Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.526385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.526509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.526585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.526664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.526731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.526792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.526886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.526949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.527013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.527090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.527184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.527294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.527374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
945365Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:56.945413Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:20:56.945532Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:20:56.945632Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:56.945673Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-02-19T08:20:56.945739Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 105, path id: 38 2026-02-19T08:20:56.946364Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:20:56.946477Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:20:56.946521Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:20:56.946563Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-02-19T08:20:56.946609Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:20:56.946987Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:20:56.947073Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:20:56.947103Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:20:56.947132Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-02-19T08:20:56.947162Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:20:56.947250Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-02-19T08:20:56.955779Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:20:56.956104Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2026-02-19T08:20:56.958465Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "user1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:20:56.958990Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-02-19T08:20:56.959116Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:20:56.959163Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.959211Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:20:56.959249Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.959312Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:20:56.959371Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-02-19T08:20:56.959418Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:20:56.959456Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-02-19T08:20:56.959518Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-02-19T08:20:56.959556Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2026-02-19T08:20:56.961609Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:56.961734Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2026-02-19T08:20:56.961921Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-02-19T08:20:56.961971Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:56.962161Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:56.962215Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-02-19T08:20:56.962781Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:20:56.962885Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:20:56.962928Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-02-19T08:20:56.962969Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-02-19T08:20:56.963021Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:20:56.963118Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-02-19T08:20:56.964795Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2026-02-19T08:20:56.965341Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:20:56.965537Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 237us result status StatusSuccess 2026-02-19T08:20:56.965943Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "user2" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:56.966478Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:56.966574Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with error: Cannot find user 'user1', at schemeshard: 72057594046678944 >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters >> TS3WrapperTests::HeadUnknownObject [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TSchemeShardLoginFinalize::Success [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2026-02-19T08:20:57.516726Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 49636C8A-ABD9-4493-985E-B8CF45D2895A, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:12557 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 9719CF1E-814E-48C0-83E1-C170F2E5C330 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings 2026-02-19T08:20:57.521778Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 49636C8A-ABD9-4493-985E-B8CF45D2895A, response# No response body. 
|92.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2026-02-19T08:20:57.827808Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# FEF43C49-794D-4111-AF09-A72219B6F9CB, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:15481 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 52D9E9AA-45B1-4765-A07E-DFF452CF0864 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-02-19T08:20:57.833442Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# FEF43C49-794D-4111-AF09-A72219B6F9CB, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-02-19T08:20:57.833875Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 38AF8293-A109-41B2-B93C-FBB0F4FF658F, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:15481 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: CB876E4D-144A-46DE-9582-F9D25385C40C amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2026-02-19T08:20:57.837695Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 38AF8293-A109-41B2-B93C-FBB0F4FF658F, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2026-02-19T08:20:57.837996Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# AA02EBF3-BEAF-4BD0-BE93-F90692EDE913, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:15481 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 98F7805A-A31B-4C8B-BD7C-D14F6696D7C1 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2026-02-19T08:20:57.840861Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# AA02EBF3-BEAF-4BD0-BE93-F90692EDE913, response# UploadPartCopyResult { } 2026-02-19T08:20:57.841248Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 1362301F-2387-45A4-ABDA-F7DB6F111032, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:15481 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: C2CF42AE-5154-408C-9C91-DBCE417DB934 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, 
HTTP2-Settings S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2026-02-19T08:20:57.844328Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 1362301F-2387-45A4-ABDA-F7DB6F111032, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2026-02-19T08:20:57.844672Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# DB65DAB5-9198-4789-BB00-46F73A1D8F51, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:15481 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 210C18EB-54E9-486B-B6D8-4D793185A7F9 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeRead: /TEST/key1 / 2 2026-02-19T08:20:57.847186Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# DB65DAB5-9198-4789-BB00-46F73A1D8F51, response# GetObjectResult { } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BanUnbanUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:52.448906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:52.448999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.449061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:52.449115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:52.449156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:52.449212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:52.449244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:52.449276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:52.449344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.449421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:52.450279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:52.450584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:52.528514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:52.528584Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.542303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:52.543043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:52.543236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:52.554483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:52.555125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:52.555887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:52.556154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:52.558924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.559096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:52.560256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:52.560310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.560406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:52.560446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:52.560481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:52.560626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.720514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:20:52.721511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.721626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.721729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.721819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.721885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.721944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.722035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.722116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.722180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.722268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.722333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.722427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.722506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:57.348243Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-02-19T08:20:57.348303Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:57.348340Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-02-19T08:20:57.348788Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-02-19T08:20:57.350367Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" CanLogin: false } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:20:57.350787Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-02-19T08:20:57.350882Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:20:57.350915Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:20:57.350966Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:20:57.350997Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:20:57.351049Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-02-19T08:20:57.351109Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:20:57.351146Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:20:57.351185Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:20:57.351222Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 1, subscribers: 0 2026-02-19T08:20:57.351262Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-02-19T08:20:57.353293Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSuccess TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:57.353398Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSuccess, operation: 
MODIFY USER, path: /MyRoot 2026-02-19T08:20:57.353540Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:57.353583Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:57.353770Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:57.353820Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:20:57.354275Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:20:57.354387Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:20:57.354433Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:20:57.354467Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-02-19T08:20:57.354502Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:20:57.354608Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:20:57.356240Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:20:57.356591Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.356689Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with error: User user1 login denied: account is blocked, at schemeshard: 72057594046678944 2026-02-19T08:20:57.358487Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" CanLogin: true } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:20:57.358945Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-02-19T08:20:57.359051Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:20:57.359114Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: 
TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:20:57.359153Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:20:57.359190Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:20:57.359243Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-02-19T08:20:57.359296Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:20:57.359327Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:20:57.359357Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:20:57.359387Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-02-19T08:20:57.359431Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:20:57.361339Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSuccess TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:20:57.361437Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2026-02-19T08:20:57.361613Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:57.361671Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:57.361844Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:57.361900Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2212], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-02-19T08:20:57.362319Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:20:57.362392Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:20:57.362434Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:20:57.362477Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:20:57.362521Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:20:57.362618Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:20:57.364271Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:20:57.364565Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.364643Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:20:57.368252Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:20:57.372953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2026-02-19T08:20:57.849967Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 3604F533-1880-4405-B445-F9CC34798F54, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:28251 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 2DB7AC02-5463-4463-8A0C-EF00E3F8E352 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings 2026-02-19T08:20:57.854890Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 3604F533-1880-4405-B445-F9CC34798F54, response# No response body. 
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2026-02-19T08:20:57.739697Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 6D6470E0-4751-4AFD-9587-7246DF30B064, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:26005 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 0B3E8E30-845E-49C9-9BFC-C0C08FA5190A amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-02-19T08:20:57.744085Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 6D6470E0-4751-4AFD-9587-7246DF30B064, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-02-19T08:20:57.744412Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# B4640AE7-D928-40DD-8154-ED909AFC3BA6, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:26005 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 39034882-F181-481C-828D-08E4C467A30E amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeRead: /TEST/key / 4 2026-02-19T08:20:57.746815Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# B4640AE7-D928-40DD-8154-ED909AFC3BA6, response# GetObjectResult { } >> TS3WrapperTests::AbortMultipartUpload >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation |92.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] >> TS3WrapperTests::MultipartUpload >> Secret::DeactivatedQueryService [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2026-02-19T08:20:52.421749Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488403870008681:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:52.425583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:52.677180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:52.694207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:52.694324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:52.785873Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488403870008649:2081] 
1771489252409060 != 1771489252409063 2026-02-19T08:20:52.804755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:52.808348Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.910194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/003205/r3tmp/yandex3jcAt0.tmp 2026-02-19T08:20:52.910233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/003205/r3tmp/yandex3jcAt0.tmp 2026-02-19T08:20:52.910453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/003205/r3tmp/yandex3jcAt0.tmp 2026-02-19T08:20:52.910563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:52.974967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:21210 2026-02-19T08:20:53.132045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:53.430356Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:55.579061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488416754911499:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.579270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.579828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488416754911512:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.579888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488416754911513:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.580160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.584263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:55.608880Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488416754911516:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-02-19T08:20:55.885261Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488416754911581:2587] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:55.910950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:56.017789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:56.032483Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488416754911589:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:20:56.035594Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NWNlMWUxNS1mYTEyZmYwNC02NWI3YjkxOC0zNDMyMTVmMg==, ActorId: [1:7608488416754911482:2330], ActorState: ExecuteState, LegacyTraceId: 01khtfsr4rfbhpeg9e59cms0vq, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:20:56.037315Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:20:56.110659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-02-19T08:20:57.417226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488403870008681:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:57.417314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TS3WrapperTests::HeadObject >> Secret::Deactivated [GOOD] >> TS3WrapperTests::MultipartUpload [GOOD] >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2026-02-19T08:20:52.732585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488404501118552:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:52.732702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:52.764956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:52.994819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:53.024708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:53.024804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:53.069854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:53.113814Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:53.113930Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488404501118501:2081] 1771489252726315 != 1771489252726318 2026-02-19T08:20:53.164002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:20:53.204728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/003200/r3tmp/yandexylqMD5.tmp 2026-02-19T08:20:53.204764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/003200/r3tmp/yandexylqMD5.tmp 2026-02-19T08:20:53.204914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/003200/r3tmp/yandexylqMD5.tmp 2026-02-19T08:20:53.205062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:62965 2026-02-19T08:20:53.412473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:53.745361Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:55.868161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488417386021334:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.868361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.868842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488417386021361:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.868910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488417386021360:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.869119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.874577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:55.886739Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488417386021364:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-02-19T08:20:56.206975Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488417386021429:2586] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:56.239547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:56.336477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:56.382393Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488421680988733:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:20:56.383381Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YjQ0NWI4OTQtNDdlMTQ3MDMtYTlkM2IyNGQtMWUyOTk3MzI=, ActorId: [1:7608488417386021321:2329], ActorState: ExecuteState, LegacyTraceId: 01khtfsrds4612np8yaqa3kqht, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:20:56.385894Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:20:56.436599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-02-19T08:20:57.732758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488404501118552:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:57.732858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TS3WrapperTests::HeadObject [GOOD] >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2026-02-19T08:20:58.904489Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 73E59606-3F61-43B4-9712-56199DD52A28, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:6745 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 5AFF6A88-3098-493E-B864-D6EA1A4A2FB5 amz-sdk-request: attempt=1 content-length: 0 
content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2026-02-19T08:20:58.909932Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 73E59606-3F61-43B4-9712-56199DD52A28, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2026-02-19T08:20:58.910216Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# C8C9F572-EF61-4CCF-92AD-48DF2B1DE81B, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:6745 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 20EE0B42-8EA5-4FFF-963C-B2D11285D52C amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2026-02-19T08:20:58.912713Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# C8C9F572-EF61-4CCF-92AD-48DF2B1DE81B, response# AbortMultipartUploadResult { } 2026-02-19T08:20:58.912968Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 6617E08F-40A3-48BB-B2FE-32DAE31CC7BD, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:6745 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 2C58F488-A4BD-4604-8C8F-D21018455ADD amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings 2026-02-19T08:20:58.915942Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 6617E08F-40A3-48BB-B2FE-32DAE31CC7BD, response# No response body. 
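Illustrative sketch only, not part of the captured log: the TS3WrapperTests::AbortMultipartUpload trace above exercises the plain S3 sequence create-multipart-upload, abort, then a HEAD that finds nothing committed. A standalone aws-sdk-cpp client could drive the same three calls roughly as below; the endpoint/port, HTTP scheme, credentials handling, and the TEST/key names merely mirror the mock trace and are placeholders, not values defined by the test harness.

#include <aws/core/Aws.h>
#include <aws/core/client/ClientConfiguration.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/CreateMultipartUploadRequest.h>
#include <aws/s3/model/AbortMultipartUploadRequest.h>
#include <aws/s3/model/HeadObjectRequest.h>
#include <iostream>

int main() {
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        Aws::Client::ClientConfiguration cfg;
        cfg.endpointOverride = "localhost:6745";   // placeholder: the S3 mock binds a fresh port on every run
        cfg.scheme = Aws::Http::Scheme::HTTP;
        Aws::S3::S3Client s3(cfg);

        // 1. Start a multipart upload (POST /TEST/key?uploads in the trace above).
        Aws::S3::Model::CreateMultipartUploadRequest createReq;
        createReq.WithBucket("TEST").WithKey("key");
        const auto created = s3.CreateMultipartUpload(createReq);
        if (created.IsSuccess()) {
            const Aws::String uploadId = created.GetResult().GetUploadId();

            // 2. Abort before any part is completed (DELETE /TEST/key?uploadId=...).
            Aws::S3::Model::AbortMultipartUploadRequest abortReq;
            abortReq.WithBucket("TEST").WithKey("key").WithUploadId(uploadId);
            s3.AbortMultipartUpload(abortReq);

            // 3. HEAD the key: nothing was committed, so the object should be absent,
            //    matching the "No response body." result in the trace.
            Aws::S3::Model::HeadObjectRequest headReq;
            headReq.WithBucket("TEST").WithKey("key");
            const bool exists = s3.HeadObject(headReq).IsSuccess();
            std::cout << "object exists after abort: " << std::boolalpha << exists << "\n";
        }
    }
    Aws::ShutdownAPI(options);
    return 0;
}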
>> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2026-02-19T08:20:59.157085Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# CC35F7C7-D7A5-44C2-9216-C5DB71CA1478, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:16760 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 8DF57749-A3F8-4E37-AEFE-3A8F82A298CC amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2026-02-19T08:20:59.162407Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# CC35F7C7-D7A5-44C2-9216-C5DB71CA1478, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2026-02-19T08:20:59.162961Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 785CF056-630C-4CA3-909E-FA0E3E41016F, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16760 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 9CE2C95C-44EF-47C3-91BD-A613FE656823 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2026-02-19T08:20:59.166286Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 785CF056-630C-4CA3-909E-FA0E3E41016F, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-02-19T08:20:59.167424Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 41B2C03E-8F94-4B5F-A7AA-714135573F18, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16760 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 633AB7A1-AB46-463E-B14F-2CF0091CB75E amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2026-02-19T08:20:59.170521Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 41B2C03E-8F94-4B5F-A7AA-714135573F18, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-02-19T08:20:59.170769Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 1F4A4898-0B7C-4E7A-92CF-34DFBD616C06, request# 
GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:16760 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 158CF546-A61F-4647-9562-9601685AFCD7 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeRead: /TEST/key / 4 2026-02-19T08:20:59.173410Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 1F4A4898-0B7C-4E7A-92CF-34DFBD616C06, response# GetObjectResult { } |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2026-02-19T08:20:59.393906Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 46BB38E9-C267-480C-99F1-D5CC1421AAB9, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:15994 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 853F50D7-6CFD-49E6-9ECE-FDB5A241718C amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-02-19T08:20:59.399221Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 46BB38E9-C267-480C-99F1-D5CC1421AAB9, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-02-19T08:20:59.400482Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 00D8296F-4F3D-431B-843B-852888E91532, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:15994 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 41F0F336-A21A-4BEE-8671-FE07AD61AF28 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeRead: /TEST/key / 4 2026-02-19T08:20:59.403405Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 00D8296F-4F3D-431B-843B-852888E91532, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:20:44.869585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:45.112089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:20:45.138903Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2342], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:20:45.700830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:45.700961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:45.708022Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489241267131 != 1771489241267135 2026-02-19T08:20:45.722355Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:45.776313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:45.889395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:20:46.415060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:46.416509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:46.416572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:46.416607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:46.416910Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:46.430905Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-02-19T08:20:58.306742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:888:2766], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:58.306846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:899:2771], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:58.307328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:58.307929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:904:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:58.308075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:58.311572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:58.329145Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:902:2774], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-02-19T08:20:58.365361Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:955:2808] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:58.569479Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:964:2816], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:1: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2026-02-19T08:20:58.571612Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=N2FjYWI1ZjEtOGU0MDBiNjQtMjViYzhmYjQtMjk0MjliY2Q=, ActorId: [1:886:2764], ActorState: ExecuteState, LegacyTraceId: 01khtfstswda0p7hyye0dxadpf, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 1 } message: "Executing CREATE OBJECT SECRET" end_position { row: 1 column: 1 } severity: 1 issues { message: "metadata provider service is disabled" severity: 1 } } } tx_id# trace_id# REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:1: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 >> BridgeGet::PartRestorationAcrossBridgeOnRange |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:20:45.312282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:45.463062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:20:45.473268Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:297:2342], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:20:45.915229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:45.915381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:45.924520Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489242131043 != 1771489242131047 2026-02-19T08:20:45.933350Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:45.991088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:46.086575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:20:46.675983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:46.676899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:46.676939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:46.676966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:46.677181Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:46.690412Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-02-19T08:20:58.828656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:890:2767], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:58.828903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:58.829359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:900:2770], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:58.829538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:1: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> BridgeGet::PartRestorationAcrossBridge |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |92.3%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] >> WebLoginServiceAudit::AuditLogLoginSuccess >> TTicketParserTest::AuthenticationWithUserAccount >> TTicketParserTest::BulkAuthorizationRetryError >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> TTicketParserTest::NebiusAuthorizationRetryError >> TTicketParserTest::LoginGood >> TSchemeShardMoveTest::Reject >> TSchemeShardMoveTest::Replace >> TSchemeShardMoveTest::MoveIndexSameDst >> TTicketParserTest::LoginRefreshGroupsWithError >> TSchemeShardMoveTest::Boot >> TSchemeShardMoveTest::MoveMigratedTable >> TSchemeShardMoveTest::MoveIndex >> TSchemeShardMoveTest::TwoTables |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |92.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> TSchemeShardMoveTest::MoveTableForBackup >> TSchemeShardMoveTest::Chain >> TSchemeShardMoveTest::ReplaceWithDifferentIndexNames ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:52.181278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:52.181389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.181465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:52.181504Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:52.181546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:52.181603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:52.181634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:52.181670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:52.181765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.181835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:52.182789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:52.183165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:52.287183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:52.287250Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.297519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:52.298216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:52.298468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:52.313082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:52.313896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:52.314709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:52.315049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:52.320989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.321204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:52.322656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:52.322727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-02-19T08:20:52.322851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:52.322903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:52.322949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:52.323190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.474938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476500Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.476976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.477084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.477175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 44, LocalPathId: 10] was 0 2026-02-19T08:21:01.784708Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-02-19T08:21:01.784738Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-02-19T08:21:01.784773Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-02-19T08:21:01.784799Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-02-19T08:21:01.784816Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-02-19T08:21:01.784830Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-02-19T08:21:01.784843Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-02-19T08:21:01.784856Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-02-19T08:21:01.784870Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-02-19T08:21:01.784888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-02-19T08:21:01.784902Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-02-19T08:21:01.784918Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-02-19T08:21:01.784933Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-02-19T08:21:01.784960Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-02-19T08:21:01.784978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-02-19T08:21:01.784992Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-02-19T08:21:01.785017Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-02-19T08:21:01.785033Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 
2026-02-19T08:21:01.785048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-02-19T08:21:01.785060Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-02-19T08:21:01.785072Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-02-19T08:21:01.785097Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-02-19T08:21:01.785124Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:21:01.785152Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-02-19T08:21:01.785173Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:21:01.785186Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:21:01.785200Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:21:01.785322Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.785387Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.785448Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.785531Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.785593Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.785704Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.786049Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.786195Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.786654Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.786741Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.786980Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.787156Z node 4 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.787224Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.787315Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.787750Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.787850Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.788305Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.788590Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.788696Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.788787Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.788952Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.789025Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.789104Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:01.795709Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:01.801595Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:01.801665Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:01.802429Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:01.802489Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:01.802546Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:01.803165Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [4:823:2789] sender: [4:882:2058] recipient: [4:15:2062] 2026-02-19T08:21:01.841268Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:01.841350Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:121: TTxLogin RotateKeys at schemeshard: 72057594046678944 
2026-02-19T08:21:01.914584Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:21:01.922319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:21:01.922509Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:01.922563Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:01.923148Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-02-19T08:21:01.923205Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:01.923248Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:876:2830], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-02-19T08:21:01.924135Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 0 |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |92.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2026-02-19T08:20:52.116903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488405447275145:2243];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:52.117135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:20:52.345774Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:20:52.373851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:52.373969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:52.411441Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.412705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:52.413243Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488405447274929:2081] 1771489252081707 != 1771489252081710 2026-02-19T08:20:52.550178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/003207/r3tmp/yandexD34kva.tmp 2026-02-19T08:20:52.550198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/003207/r3tmp/yandexD34kva.tmp 2026-02-19T08:20:52.550355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/003207/r3tmp/yandexD34kva.tmp 2026-02-19T08:20:52.550430Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:52.561907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:15736 2026-02-19T08:20:52.774454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:52.782529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:20:52.792700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:20:53.117315Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:55.110381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488418332177760:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.110606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.111258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488418332177796:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.111309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488418332177797:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.111496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:55.123007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:55.138336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-02-19T08:20:55.138968Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488418332177800:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-02-19T08:20:55.394064Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488418332177867:2592] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:55.399255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:55.583202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:55.649549Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488418332177907:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:20:55.652013Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZWZjMWU4ODYtZTRhYjBmYTctZjI4YTg1Ni1mMDYyMjc0ZA==, ActorId: [1:7608488418332177757:2328], ActorState: ExecuteState, LegacyTraceId: 01khtfsqnr5j9g64n7ced5bb84, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:20:55.656032Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:20:55.729273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-02-19T08:20:57.109877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488405447275145:2243];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:57.109962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink |92.3%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardMoveTest::Boot [GOOD] |92.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly |92.3%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |92.3%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TSchemeShardMoveTest::TwoTables [GOOD] >> TSchemeShardMoveTest::ResetCachedPath >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex >> TSchemeShardMoveTest::ReplaceWithDifferentIndexNames [GOOD] >> TSchemeShardMoveTest::ReplaceVectorIndex >> BridgeGet::PartRestorationAcrossBridge [GOOD] >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationRetryError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:52.544967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:52.545039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.545086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:52.545113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:52.545155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:52.545196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:52.545220Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:52.545239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:52.545290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.545355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:52.546080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:52.546352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:52.622292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:52.622351Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.637140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:52.637915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:52.638134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:52.652174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:52.652900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:52.653675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:52.654052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:52.657544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.657745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:52.659080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:52.659141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.659285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:52.659356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:52.659421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:52.659616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.787032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.788988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.789062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.789147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.789260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.789339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
78944 2026-02-19T08:21:00.776023Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:21:00.776263Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:00.776306Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:00.776743Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-02-19T08:21:00.776794Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:00.776832Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-02-19T08:21:00.777312Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-02-19T08:21:00.777518Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:00.777584Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:21:00.780813Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:21:00.782424Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-02-19T08:21:00.782642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:00.782697Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:21:00.786863Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:21:00.788908Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-02-19T08:21:00.789251Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:00.789338Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:21:00.793979Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:21:00.795754Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-02-19T08:21:00.796052Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:00.796145Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-02-19T08:21:00.796475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:00.796549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-02-19T08:21:00.796898Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:00.797069Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 215us result status StatusSuccess 2026-02-19T08:21:00.797590Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuZvGmsWMaSF1YlsKl6s2\nPxKma3IQguAyJ0NefvcRgGBcA/dJAw7kefHGT65sDEGtIz+uGPtxikrm7Tekw36O\nx+ra2H0IpRE4RQgYQAf1Gc3Sfjn1AWba/7IZ6icYaD7UGzi+ZXL8vNrqBtG/7icc\ntvtb9Re2V2AMH5ue43BLyLvr7yVkH2k5z/CIHhRVGoi7WBEder6Ghcuv6H57u5e+\nzaeon91AEJCWUKyFKSSm/WJlND2JyyedgjpI4hOS6RtDcPxCsZg/wI+CxFcaWB9m\nxA/tIWDVB59i/AQVgoF3IkHvguSmBqsYlo/Rc6Tv/onHxltw4PfntBNaPOIgFv3G\npQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1771575660767 } Sids { Name: "user1" Type: USER HashesInitParams { HashType: Argon InitParams: "xeJYFXPrYIZZf7UgCMnOSw==" } } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 
MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:04.798563Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:04.803732Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:21:04.806829Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:21:04.808108Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-02-19T08:21:04.808624Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:04.808754Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:21:04.813530Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:21:04.819604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-02-19T08:21:04.820260Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:04.820564Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 326us result status StatusSuccess 2026-02-19T08:21:04.821180Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 
DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuZvGmsWMaSF1YlsKl6s2\nPxKma3IQguAyJ0NefvcRgGBcA/dJAw7kefHGT65sDEGtIz+uGPtxikrm7Tekw36O\nx+ra2H0IpRE4RQgYQAf1Gc3Sfjn1AWba/7IZ6icYaD7UGzi+ZXL8vNrqBtG/7icc\ntvtb9Re2V2AMH5ue43BLyLvr7yVkH2k5z/CIHhRVGoi7WBEder6Ghcuv6H57u5e+\nzaeon91AEJCWUKyFKSSm/WJlND2JyyedgjpI4hOS6RtDcPxCsZg/wI+CxFcaWB9m\nxA/tIWDVB59i/AQVgoF3IkHvguSmBqsYlo/Rc6Tv/onHxltw4PfntBNaPOIgFv3G\npQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1771575660767 } Sids { Name: "user1" Type: USER HashesInitParams { HashType: Argon InitParams: "xeJYFXPrYIZZf7UgCMnOSw==" } } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForRootDb >> TestSetCloudPermissions::CanSetAllPermissions >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> TSchemeShardMoveTest::Replace [GOOD] >> TSchemeShardMoveTest::Replace2 >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes >> KqpQuery::ExecuteWriteQuery [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridge [GOOD] Test command err: RandomSeed# 17292232283562956161 originalGroupIndex# 0 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 0 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 0 
mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] >> TSchemeShardMoveTest::ResetCachedPath [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.158245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.158330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.158408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.158447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.158490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.158525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.158562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.158590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.158667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.158832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.159740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.160070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.266494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.266558Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.280383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.281028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.281222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.292506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.293237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.294031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.295178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.302487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.302682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.303867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.303934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.304036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.304086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.304132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.304298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.481932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.482899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: 
EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
Step: 0 Generation: 2 2026-02-19T08:21:05.784365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:05.784405Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2026-02-19T08:21:05.784442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:21:05.784494Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:2 ProposedWaitParts -> Done 2026-02-19T08:21:05.785232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 8589937310 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2026-02-19T08:21:05.785270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2026-02-19T08:21:05.785381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 8589937310 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2026-02-19T08:21:05.785424Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:556: TMoveTable TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:21:05.785484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:562: TMoveTable TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 738 RawX2: 8589937310 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2026-02-19T08:21:05.785543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:05.785577Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:21:05.785606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:21:05.785661Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 ProposedWaitParts -> Done 2026-02-19T08:21:05.788420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2026-02-19T08:21:05.790066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 
2026-02-19T08:21:05.792624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2026-02-19T08:21:05.793142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:21:05.793395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2026-02-19T08:21:05.793457Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:707: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:21:05.793519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:717: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-02-19T08:21:05.793628Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 2/3 2026-02-19T08:21:05.793658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2026-02-19T08:21:05.793697Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 2/3 2026-02-19T08:21:05.793755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2026-02-19T08:21:05.793793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2026-02-19T08:21:05.794124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:21:05.794168Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:707: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:21:05.794200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:717: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:21:05.794268Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 3/3 2026-02-19T08:21:05.794302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-02-19T08:21:05.794333Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 3/3 2026-02-19T08:21:05.794356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-02-19T08:21:05.794381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2026-02-19T08:21:05.794416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-02-19T08:21:05.794458Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all 
the parts is done, operation id: 103:0 2026-02-19T08:21:05.794491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:21:05.794641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-02-19T08:21:05.794689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:21:05.794734Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2026-02-19T08:21:05.794756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:1 2026-02-19T08:21:05.794815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-02-19T08:21:05.794844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:21:05.794867Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2026-02-19T08:21:05.794896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:2 2026-02-19T08:21:05.794953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-02-19T08:21:05.794993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:21:05.795404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:21:05.795453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:21:05.795536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:21:05.795575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:21:05.795615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:21:05.795645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:21:05.795676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:21:05.807585Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: 
TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:21:05.808191Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [2:680:2669] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2026-02-19T08:21:05.856999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:21:05.857088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:21:05.857573Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:21:05.857691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:21:05.857755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:1086:2968] TestWaitNotification: OK eventTxId 103 >> TSchemeShardMoveTest::Index [GOOD] >> TSchemeShardMoveTest::OneTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.197558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.197648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.197746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.197785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.197831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.197866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.197899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.197933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.198013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-02-19T08:21:03.198108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.198941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.199233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.282913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.282959Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.300102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.301663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.301888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.318032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.322202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.322773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.323101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.327182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.327419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.328729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.328804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.328974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.329025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.329093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.329320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.484526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.485920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
000042 2026-02-19T08:21:06.422054Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:06.422126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:06.422163Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-02-19T08:21:06.422224Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710760:0 Propose -> Done 2026-02-19T08:21:06.423975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.424026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-02-19T08:21:06.424104Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-02-19T08:21:06.424141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:21:06.424178Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-02-19T08:21:06.424199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:21:06.424225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-02-19T08:21:06.424275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:126:2151] message: TxId: 281474976710760 2026-02-19T08:21:06.424378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:21:06.424408Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-02-19T08:21:06.424430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710760:0 2026-02-19T08:21:06.424498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-02-19T08:21:06.428662Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-02-19T08:21:06.428740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710760 2026-02-19T08:21:06.428806Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-02-19T08:21:06.428922Z node 2 :BUILD_INDEX 
DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:861:2819], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-02-19T08:21:06.430637Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-02-19T08:21:06.430765Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:861:2819], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:21:06.430816Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:21:06.432315Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-02-19T08:21:06.432440Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:861:2819], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:21:06.432487Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-02-19T08:21:06.432600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:21:06.432651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1044:2990] TestWaitNotification: OK eventTxId 102 2026-02-19T08:21:06.433228Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:06.433474Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 276us result status StatusSuccess 2026-02-19T08:21:06.433963Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 41 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2026-02-19T08:21:03.203458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.203544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.203633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.203662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.203696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.203724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.203744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.203765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.203817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.203902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.204553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.204782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.291700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.291749Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.297405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.297617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.297792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.301586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.301808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.302614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.302831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.304705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.304909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.305686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.305778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.306291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.306351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.306397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.306568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.490398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.491971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.492061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.492137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.492217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.492293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
hemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:06.394803Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:06.395028Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 234us result status StatusSuccess 2026-02-19T08:21:06.395737Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:06.422530Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:06.422815Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 308us result status StatusSuccess 2026-02-19T08:21:06.423619Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 41 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 41 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.290958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.291052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.291135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.291221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.291271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.291312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.291350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.291376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.291455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.291560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.292438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.292765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.393262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.393321Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.406003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.406791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.407012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.418796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.419549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.420247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.420583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.423729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.423920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.425097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.425155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.425254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.425299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.425340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.425517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.583352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.584947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.585004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.585079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.585167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.585230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
de 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2026-02-19T08:21:06.541523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:21:06.545324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2026-02-19T08:21:06.545456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000040 2026-02-19T08:21:06.545867Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:06.545984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:06.546044Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000040, at schemeshard: 72057594046678944 2026-02-19T08:21:06.546338Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 105:0 Propose -> ProposedWaitParts 2026-02-19T08:21:06.546467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:21:06.551503Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:06.551588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:21:06.551822Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:06.551863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2026-02-19T08:21:06.552336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.552386Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:21:06.553412Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:21:06.553514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:21:06.553557Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:21:06.553593Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 8 2026-02-19T08:21:06.553635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:21:06.553738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 105 2026-02-19T08:21:06.555243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1280 } } CommitVersion { Step: 5000040 TxId: 105 } 2026-02-19T08:21:06.555293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2026-02-19T08:21:06.555414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1280 } } CommitVersion { Step: 5000040 TxId: 105 } 2026-02-19T08:21:06.555561Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1280 } } CommitVersion { Step: 5000040 TxId: 105 } debug: NTableState::TProposedWaitParts operationId# 105:0 2026-02-19T08:21:06.556762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1081 RawX2: 8589937611 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-02-19T08:21:06.556810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2026-02-19T08:21:06.556941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1081 RawX2: 8589937611 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-02-19T08:21:06.557042Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:21:06.557153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1081 RawX2: 8589937611 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-02-19T08:21:06.557218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:06.557250Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.557289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:21:06.557331Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 105:0 ProposedWaitParts -> Done 2026-02-19T08:21:06.558097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:21:06.559614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.559721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.559935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.559979Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 105:0 ProgressState 2026-02-19T08:21:06.560083Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-02-19T08:21:06.560118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:21:06.560153Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-02-19T08:21:06.560185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:21:06.560216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 
2026-02-19T08:21:06.560276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:744:2721] message: TxId: 105 2026-02-19T08:21:06.560319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:21:06.560355Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-02-19T08:21:06.560386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 105:0 2026-02-19T08:21:06.560523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:21:06.562014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:21:06.562071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1250:3169] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.248785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.248876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.248945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.248986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.249031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.249090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.249130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.249173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.249262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.249362Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.250338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.250638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.355266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.355336Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.367664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.368346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.368545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.379845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.380557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.381300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.381608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.387624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.387826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.389065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.389130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.389279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.389347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.389406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.389597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.549529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.553602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.553812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.553913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.553977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.554647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
p:556: TMoveTable TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:21:06.725617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:562: TMoveTable TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 732 RawX2: 8589937306 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:21:06.725674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:06.725723Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2026-02-19T08:21:06.725757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:21:06.725796Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:2 ProposedWaitParts -> Done 2026-02-19T08:21:06.726465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 736 RawX2: 8589937309 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:21:06.726497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-02-19T08:21:06.726575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 736 RawX2: 8589937309 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:21:06.726608Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:556: TMoveTable TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:21:06.726658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:562: TMoveTable TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 736 RawX2: 8589937309 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:21:06.726692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:06.726715Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.726737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:21:06.726766Z node 2 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:21:06.728185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2026-02-19T08:21:06.729200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.729331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2026-02-19T08:21:06.729892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2026-02-19T08:21:06.729932Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:707: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:21:06.729973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:717: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-02-19T08:21:06.730084Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2026-02-19T08:21:06.730128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2026-02-19T08:21:06.730199Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2026-02-19T08:21:06.730229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2026-02-19T08:21:06.730257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2026-02-19T08:21:06.730747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.730823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:21:06.730848Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:707: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:21:06.730874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:717: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:21:06.730929Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2026-02-19T08:21:06.730953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-02-19T08:21:06.730980Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2026-02-19T08:21:06.731000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 
2026-02-19T08:21:06.731020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2026-02-19T08:21:06.731079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:790:2756] message: TxId: 102 2026-02-19T08:21:06.731121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-02-19T08:21:06.731164Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:21:06.731196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:21:06.731292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-02-19T08:21:06.731321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:21:06.731351Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-02-19T08:21:06.731367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:1 2026-02-19T08:21:06.731389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-02-19T08:21:06.731404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:21:06.731419Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2026-02-19T08:21:06.731437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:2 2026-02-19T08:21:06.731468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-02-19T08:21:06.731483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:21:06.731882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:21:06.731922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:21:06.731986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:21:06.732027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:21:06.732058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:21:06.732090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:21:06.732119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:21:06.736470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:21:06.736521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:889:2847] 2026-02-19T08:21:06.736656Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.357256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.357324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.357374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.357400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.357436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.357462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.357488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.357510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.357566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.357630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.358551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.358846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.462765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.462829Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.480057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.480774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.481001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.492467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.493289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.494083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.494410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.497529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.497740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.498940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.499000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.499106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.499150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.499194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.499388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.667095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:21:03.668147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.668954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.669035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.669122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.669215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
EvNotifyTxCompletionResult to actorId: [2:783:2750] message: TxId: 102 2026-02-19T08:21:06.678326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-02-19T08:21:06.678369Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:21:06.678404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:21:06.678522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-02-19T08:21:06.678559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:21:06.678601Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-02-19T08:21:06.678623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:1 2026-02-19T08:21:06.678687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:21:06.678715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:21:06.679058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:21:06.679112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:21:06.679183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:21:06.679223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:21:06.679258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:21:06.681463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:21:06.681512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:881:2834] 2026-02-19T08:21:06.681975Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-02-19T08:21:06.685356Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-02-19T08:21:06.685543Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 213us result status StatusPathDoesNotExist 2026-02-19T08:21:06.685694Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:21:06.686068Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:06.686246Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 189us result status StatusPathDoesNotExist 2026-02-19T08:21:06.686381Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:21:06.686736Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:06.686921Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 215us result status StatusSuccess 2026-02-19T08:21:06.687235Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:06.687636Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:06.687745Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 124us result status StatusSuccess 2026-02-19T08:21:06.687943Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |92.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.496857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.496946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.497012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.497050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.497092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.497129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:21:03.497162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.497189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.497262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.497359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.498207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.498494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.575553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.575611Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.588430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.589158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.589390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.611887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.612607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.613366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.613639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.616504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.616671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.617760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.617815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.617918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.617965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.618004Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.618181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.785836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.786904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787507Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.787990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.788059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
8944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:07.038754Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:07.038984Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 245us result status StatusSuccess 2026-02-19T08:21:07.039786Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 46 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:07.040497Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:07.040709Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 219us result status StatusSuccess 2026-02-19T08:21:07.041371Z node 2 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 44 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 45 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 44 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 44 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 44 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ExecuteWriteQuery [GOOD] Test command err: 2026-02-19T08:19:51.095218Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488143086827652:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:51.095332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:51.488638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:51.511694Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:51.515673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:51.515779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:51.517054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488143086827417:2081] 1771489191078886 != 1771489191078889 2026-02-19T08:19:51.532205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:51.626736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:51.626758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:51.626769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:51.626870Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:51.676149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:52.102106Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:52.215563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:52.226295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:54.668609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488155971730199:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.668772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.674296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488155971730211:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.674386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488155971730212:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.674551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:54.680003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:54.696434Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488155971730215:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:19:54.824259Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488155971730266:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:55.188456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.738278Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488162636175286:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:56.738333Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:56.805859Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:56.966716Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488162636175067:2081] 1771489196700159 != 1771489196700162 2026-02-19T08:19:56.996743Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:57.006867Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:57.006944Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:57.016599Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:57.129763Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:57.198323Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:57.198349Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:57.198356Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:57.198437Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:57.601061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:57.608052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:19:57.733849Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:20:00.077891Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488179816045123:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:00.078015Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:00.078324Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488179816045155:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:00.081886Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488179816045157:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:00.081996Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:00.082409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:00.107820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:20:00.110539Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608488179816045158:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:20:00.204540Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608488179816045210:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:00.272261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreat ... Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608488286736497311:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:20:30.301905Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:20:30.617851Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488308211335641:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.617968Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.618608Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488308211335650:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.618683Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:30.702504Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:30.745369Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:30.799406Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:30.897188Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:30.984156Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:31.068435Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:31.158885Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:31.240662Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:31.358687Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488312506303823:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:31.358838Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:31.359183Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488312506303828:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:31.359195Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488312506303829:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:31.359285Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:31.364281Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:20:31.382052Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488312506303832:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:20:31.449191Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488312506303883:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:33.544881Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:40.395640Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:20:40.395677Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:45.278278Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-02-19T08:20:45.338330Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-02-19T08:20:46.616623Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-02-19T08:20:46.654566Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-02-19T08:20:47.987083Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-02-19T08:20:52.512574Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtfs2bx06fsgqwecjcxmmj5", SessionId: ydb://session/3?node_id=5&id=Y2I1MTRlZGUtNTdkMTVhZWQtYWNjZjBkZDAtNzMzZGE5MDc=, Slow query, duration: 19.231002s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2026-02-19T08:20:54.406436Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-02-19T08:20:54.836011Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtfs2bx06fsgqwecjcxmmj5", SessionId: ydb://session/3?node_id=5&id=Y2I1MTRlZGUtNTdkMTVhZWQtYWNjZjBkZDAtNzMzZGE5MDc=, Slow query, duration: 21.554437s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE 
test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2026-02-19T08:20:54.840606Z --------------- Start update --------------- 2026-02-19T08:20:54.841295Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:20:54.850990Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:21:03.974946Z node 5 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 80.8618 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037927 table: [/Root/test_table] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.097778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.097896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.097962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.098012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.098924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.098993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.099031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.099092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.099203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.099337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.100363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.104321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.241269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.241340Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.260890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.261580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.263279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.276868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.280706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.282872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.285353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.294650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.294866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.300090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.300184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.300329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.300382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.300430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.301291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.478443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.482492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.482632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.482731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.482831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.482899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.482955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:21:03.483158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.483537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:21:07.208377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:21:07.208429Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 108:0 ProposedWaitParts -> Done 2026-02-19T08:21:07.209689Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:21:07.209806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:21:07.209847Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:21:07.209886Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2026-02-19T08:21:07.209933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:21:07.211564Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:21:07.211650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:21:07.211681Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:21:07.211710Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-02-19T08:21:07.211745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-02-19T08:21:07.211828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2026-02-19T08:21:07.213762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:21:07.213821Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:21:07.214056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: 
DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:21:07.214173Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:21:07.214220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:21:07.214257Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:21:07.214290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:21:07.214326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-02-19T08:21:07.214388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:747:2724] message: TxId: 108 2026-02-19T08:21:07.214429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:21:07.214471Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-02-19T08:21:07.214503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 108:0 2026-02-19T08:21:07.214597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:21:07.215469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:21:07.217703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:21:07.218178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:21:07.218225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:1247:3202] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2026-02-19T08:21:07.218946Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-02-19T08:21:07.219008Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2026-02-19T08:21:07.238265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 721 RawX2: 8589937298 } TabletId: 72075186233409546 State: 4 2026-02-19T08:21:07.238385Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2026-02-19T08:21:07.239988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 
2026-02-19T08:21:07.240095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:21:07.240499Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-02-19T08:21:07.240685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:07.240949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 Forgetting tablet 72075186233409546 2026-02-19T08:21:07.243382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:21:07.243439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:21:07.243541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:21:07.252722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:21:07.252811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:21:07.253330Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2026-02-19T08:21:07.254141Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:07.254325Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 205us result status StatusSuccess 2026-02-19T08:21:07.254724Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 26 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 26 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 24 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::Replace2 [GOOD] >> WebLoginServiceAudit::AuditLogLoginSuccess [GOOD] >> WebLoginServiceAudit::AuditLogLoginBadPassword >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |92.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.097750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.097858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.097969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.098021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.098742Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.098816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.098862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.098892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.099039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.099146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.100250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.104099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.232057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.232111Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.246582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.248181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.251227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.282028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.283595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.284440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.285317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.291061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.292729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.301372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.301485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.301672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.301757Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.301819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.302082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.496954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.497931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.498883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 28147 ... Id: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 59 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 
MinPartitionsCount: 1 } } } } } PathId: 59 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:08.369415Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:08.369605Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dst/Async" took 216us result status StatusSuccess 2026-02-19T08:21:08.370332Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst/Async" PathDescription { Self { Name: "Async" PathId: 57 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 56 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 58 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 57 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 57 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 
Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 57 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:08.371260Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:08.371419Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 182us result status StatusSuccess 2026-02-19T08:21:08.371880Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 39 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 39 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 37 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 56 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMonitoringTests::ValidActorId |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] >> TMonitoringTests::ValidActorId [GOOD] >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] >> TMonitoringTests::InvalidActorId |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> WebLoginServiceAudit::AuditLogEmptySIDsLoginSuccess |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] Test command err: 2026-02-19T08:21:06.847888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488463851938841:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:06.848007Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:07.195043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:07.195236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:07.229325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:07.251129Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:07.253559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:07.254005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488463851938811:2081] 1771489266842311 != 1771489266842314 2026-02-19T08:21:07.341165Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.clusters.get ydb.clusters.manage ydb.clusters.monitor) 2026-02-19T08:21:07.341244Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d592a4003d0] Connect to grpc://localhost:23926 2026-02-19T08:21:07.351774Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d592a4003d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.monitor" } } result_filter: ALL_FAILED } 2026-02-19T08:21:07.369446Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d592a4003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:07.369764Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:07.370861Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: 2026-02-19T08:21:07.419826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] Test command err: 2026-02-19T08:21:06.847852Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488463732793576:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:06.847898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:07.189467Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:07.192005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:07.234283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:07.252271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:07.278459Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:07.281148Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488463732793546:2081] 1771489266844132 != 1771489266844135 2026-02-19T08:21:07.329286Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2026-02-19T08:21:07.329387Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d6098a003d0] Connect to grpc://localhost:19062 2026-02-19T08:21:07.339778Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d6098a003d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.databases.l...(truncated) } 2026-02-19T08:21:07.369522Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d6098a003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:07.369809Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:07.370851Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root, user: user1@as, from ip: 2026-02-19T08:21:07.452651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] Test command err: 2026-02-19T08:21:06.850979Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488464393833931:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:06.851024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:06.900013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:07.197402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:07.197470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:07.205654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:07.242346Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:07.280293Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:07.281602Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488464393833898:2081] 1771489266833384 != 1771489266833387 2026-02-19T08:21:07.328091Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.developerApi.update ydb.tables.write ydb.databases.create ydb.databases.connect ydb.developerApi.get ydb.tables.select) 2026-02-19T08:21:07.328166Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c8520bf07d0] Connect to grpc://localhost:29188 2026-02-19T08:21:07.336372Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c8520bf07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2026-02-19T08:21:07.369671Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c8520bf07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:07.370030Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:07.371027Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: 2026-02-19T08:21:07.513753Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] >> 
VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true [GOOD] >> VectorIndexBuildTest::Metering_CommonDB |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |92.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardWrite::AsyncIndexKeySizeConstraint >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] >> DataShardWrite::UpsertWithDefaults >> DataShardWrite::UpsertIncrement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:03.643152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:03.643233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.643297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:03.643331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:03.643376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:03.643412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:03.643443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:03.643469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:03.643590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:03.643675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:03.644492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:03.644799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:03.729638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:03.729680Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.740191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:03.740712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:03.740907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:03.750491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:03.751120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:03.751835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:03.752078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:03.754527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.754702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:03.755591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:03.755632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:03.755703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:03.755737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:03.755763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:03.755882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:03.860306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.861246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.861385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.861481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.861544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.861601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.861657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.861777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.861892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:21:03.861976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.862064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.862122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.862200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:03.862266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
26-02-19T08:21:11.249452Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 65], version: 18446744073709551615 2026-02-19T08:21:11.249478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 4 2026-02-19T08:21:11.249881Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:21:11.249953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:21:11.249980Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-02-19T08:21:11.250026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 66], version: 18446744073709551615 2026-02-19T08:21:11.250056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 3 2026-02-19T08:21:11.250706Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:21:11.250802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:21:11.250832Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-02-19T08:21:11.250855Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 67], version: 18446744073709551615 2026-02-19T08:21:11.250880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 5 2026-02-19T08:21:11.250993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 4/6, is published: true 2026-02-19T08:21:11.251478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:4, at schemeshard: 72057594046678944 2026-02-19T08:21:11.251525Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 107:4 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:21:11.251726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount 
reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 4 2026-02-19T08:21:11.251799Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2026-02-19T08:21:11.251819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2026-02-19T08:21:11.251840Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2026-02-19T08:21:11.251858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2026-02-19T08:21:11.251879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 5/6, is published: true 2026-02-19T08:21:11.252725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2026-02-19T08:21:11.252756Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 107:2 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:21:11.252887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 3 2026-02-19T08:21:11.252940Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2026-02-19T08:21:11.252956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-02-19T08:21:11.252987Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2026-02-19T08:21:11.253009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-02-19T08:21:11.253033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 6/6, is published: true 2026-02-19T08:21:11.253089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:737:2714] message: TxId: 107 2026-02-19T08:21:11.253134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-02-19T08:21:11.253190Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-02-19T08:21:11.253227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:0 2026-02-19T08:21:11.253357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:21:11.253398Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2026-02-19T08:21:11.253418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:1 2026-02-19T08:21:11.253445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 64] was 4 
2026-02-19T08:21:11.253477Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2026-02-19T08:21:11.253496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:2 2026-02-19T08:21:11.253562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 2 2026-02-19T08:21:11.253582Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:3 2026-02-19T08:21:11.253604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:3 2026-02-19T08:21:11.253630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 2 2026-02-19T08:21:11.253645Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:4 2026-02-19T08:21:11.253655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:4 2026-02-19T08:21:11.253696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 3 2026-02-19T08:21:11.253737Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:5 2026-02-19T08:21:11.253750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:5 2026-02-19T08:21:11.253782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 68] was 1 2026-02-19T08:21:11.254067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:21:11.254111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 68], at schemeshard: 72057594046678944 2026-02-19T08:21:11.254199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 2 2026-02-19T08:21:11.256067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:21:11.256134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:21:11.256186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:21:11.256322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:21:11.256395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 
2026-02-19T08:21:11.259381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:21:11.259449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:21:11.259487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:21:11.259825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:21:11.260218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:21:11.260272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:4134:5884] 2026-02-19T08:21:11.261631Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout >> KqpStreamLookup::ReadTableDuringSplit >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> DataShardWrite::ReplaceImmediate >> KqpStreamLookup::ReadTableWithIndexDuringSplit |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] Test command err: 2026-02-19T08:21:09.155305Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488476194569196:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:09.155414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:09.395122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:09.401327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:09.401454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:09.477338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:09.519862Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488476194569166:2081] 1771489269152345 != 1771489269152348 2026-02-19T08:21:09.530240Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles 
were not loaded 2026-02-19T08:21:09.568170Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2026-02-19T08:21:09.568248Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d12101003d0] Connect to grpc://localhost:14849 2026-02-19T08:21:09.571869Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d12101003d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2026-02-19T08:21:09.599037Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d12101003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:09.599528Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:09.601408Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: 2026-02-19T08:21:09.604605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |92.5%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestSqsTopicHttpProxy::TestPurgeQueue [GOOD] |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> WebLoginServiceAudit::AuditLogLoginBadPassword [GOOD] >> WebLoginServiceAudit::AuditLogLdapLoginSuccess |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest >> KqpPg::TempTablesDrop [GOOD] >> KqpPg::TempTablesWithCache |92.5%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |92.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |92.5%| [TA] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |92.5%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorization |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |92.5%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |92.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately >> S3Writer::WriteTableS3 >> TResourcePoolTest::ParallelCreateResourcePool |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest >> TResourcePoolTest::CreateResourcePoolWithProperties >> TResourcePoolTest::DropResourcePool >> TResourcePoolTest::ReadOnlyMode >> WebLoginServiceAudit::AuditLogEmptySIDsLoginSuccess [GOOD] >> WebLoginServiceAudit::AuditLogAdminLoginSuccess >> DataShardWrite::UpsertWithDefaults [GOOD] >> DataShardWrite::WriteImmediateBadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestPurgeQueue [GOOD] Test command err: 2026-02-19T08:18:13.449086Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487723120034603:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:13.449132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:18:14.106810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:18:14.129759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:18:14.129848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:18:14.257584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:18:14.426804Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2205} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.104878s 2026-02-19T08:18:14.426892Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1012} StateWork event processing took too much time Type# 2146435078 Duration# 0.104999s 2026-02-19T08:18:14.426965Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:14.429916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487723120034369:2081] 1771489093401452 != 1771489093401455 2026-02-19T08:18:14.446791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:18:14.455306Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:18:14.528951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:18:14.528989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:18:14.529000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:18:14.529122Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:18:14.776317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:18:14.794064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:18:15.023125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:18:15.035128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:18:15.038912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:18:15.082242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-02-19T08:18:15.184323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.360620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.444795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.528556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-02-19T08:18:15.534705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.605573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.671792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.747966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.825220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.900394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:15.978072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:18.014745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487744594872515:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:18.014905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:18.015525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487744594872527:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:18.015581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487744594872528:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:18.015749Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:18:18.020652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:18:18.064428Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487744594872531:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-02-19T08:18:18.125401Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487744594872582:3106] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:18.449831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487723120034603:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:18:18.449909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:18:18.678335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:18:18.731080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... : [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7608488493552162793:2569], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:21:13.982546Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:13.984119Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608488493552162796:2570], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:13.984182Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:13.989131Z node 7 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:21:13.991184Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7608488493552162790:2567]: Pool not found 2026-02-19T08:21:13.992024Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-02-19T08:21:13.995981Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7608488493552162816:2576], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-02-19T08:21:13.996007Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608488493552162815:2575], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:13.996108Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:13.996613Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608488493552162820:2577], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:13.997051Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:13.997097Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:71: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPPurgeRequest Topic: "/Root/ExampleQueueName" Consumer: "ydb-sqs-consumer" PartitionId: 0 Http output full {} 2026-02-19T08:21:13.998486Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:21:13.998515Z node 7 :PERSQUEUE DEBUG: partition.cpp:2438: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-02-19T08:21:13.998554Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:21:13.998571Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:21:13.998596Z node 7 :PERSQUEUE DEBUG: partition.cpp:2502: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-02-19T08:21:13.998656Z node 7 :PERSQUEUE DEBUG: partition.cpp:3914: [72075186224037907][Partition][0][StateIdle] Topic 'ExampleQueueName' partition 0 user ydb-sqs-consumer offset is set to 2 (startOffset 0) session 2026-02-19T08:21:13.998677Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:21:13.998695Z node 7 :PERSQUEUE DEBUG: partition.cpp:2371: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:21:13.998715Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:21:13.998946Z node 7 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:21:13.999961Z node 7 :SQS INFO: http_req.cpp:1237: Request [2c8cc60d-3a6f2768-53649547-d92bfe22] Request done. 
Api [SqsTopic] Action [PurgeQueue] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [189b:18bc:217c:0:9b:18bc:217c:0] Duration [27ms] Code [200] 2026-02-19T08:21:13.999990Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [PurgeQueue] requestId [2c8cc60d-3a6f2768-53649547-d92bfe22] reply ok 2026-02-19T08:21:14.000336Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:47108) <- (200 , 2 bytes) 2026-02-19T08:21:14.000449Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:47108) connection closed 2026-02-19T08:21:14.002770Z node 7 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:21:14.002848Z node 7 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'ExampleQueueName' partition 0 user ydb-sqs-consumer readTimeStamp for offset 2 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-02-19T08:21:14.002892Z node 7 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:21:14.002921Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:21:14.002936Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:21:14.002948Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:21:14.002985Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:21:14.003005Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:21:14.003036Z node 7 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:21:14.003076Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:1202: [PQ: 72075186224037907] Topic 'ExampleQueueName' counters. CacheSize 142 CachedBlobs 2 2026-02-19T08:21:14.007844Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7608488493552162813:2574]: Pool not found 2026-02-19T08:21:14.008841Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete E0219 08:21:14.011277636 324777 chttp2_transport.cc:1146] ipv4:127.0.0.1:16828: Received a GOAWAY with error code ENHANCE_YOUR_CALM and debug data equal to "too_many_pings". 
Current keepalive time (before throttling): 10000ms 2026-02-19T08:21:14.024436Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:21:14.024466Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:21:14.024479Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:21:14.024494Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:21:14.024507Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:21:14.024730Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "ydb-sqs-consumer" CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 2 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 508 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 1 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 2026-02-19T08:21:14.125138Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:21:14.125179Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:21:14.125192Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:21:14.125212Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:21:14.125224Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:21:14.225806Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:21:14.225841Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:21:14.225855Z node 7 
:PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:21:14.225873Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:21:14.225886Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist >> DataShardWrite::UpsertIncrement [GOOD] >> DataShardWrite::UpsertImmediate >> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb >> DataShardWrite::AsyncIndexKeySizeConstraint [GOOD] >> DataShardWrite::DeleteImmediate >> TResourcePoolTest::DropResourcePoolTwice >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables >> TResourcePoolTest::SchemeErrors >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink-UseStreamIndex >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> TResourcePoolTest::DropResourcePool [GOOD] >> TTicketParserTest::NebiusAuthenticationUnavailable >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc >> TTicketParserTest::TicketFromCertificateWithValidationBad >> TResourcePoolTest::AlterResourcePool >> KqpExplain::SortStage >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> VectorIndexBuildTest::Metering_CommonDB [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:16.596604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:16.596719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:16.596801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:16.596859Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:16.596906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:16.596947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:16.596979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:16.597009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:16.597107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:16.597190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:16.599101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:16.599508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:16.694945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:16.695008Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:16.713477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:16.714450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:16.714673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:16.727259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:16.728058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:16.728814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:16.729115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:16.732224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:16.732414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:16.733611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:16.733672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-02-19T08:21:16.733840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:16.733894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:16.733944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:16.734149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:16.876248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.877305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.877422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.877511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.877656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.877738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.877826Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.877922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.878028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.878100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.878190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.878257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.878325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:16.878392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... t@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:17.807339Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:17.807641Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 228us result status StatusSuccess 2026-02-19T08:21:17.807985Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:17.808757Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:17.808941Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools" took 197us result status StatusSuccess 2026-02-19T08:21:17.809397Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools" PathDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2026-02-19T08:21:17.810028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:17.810219Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" took 200us result status StatusSuccess 2026-02-19T08:21:17.810542Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" PathDescription { Self { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:17.811169Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:17.811430Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 226us result status StatusSuccess 2026-02-19T08:21:17.811779Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true 
CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> TResourcePoolTest::ReadOnlyMode [GOOD] >> TResourcePoolTest::DropResourcePoolTwice [GOOD] >> KqpQuery::QueryTimeout >> TResourcePoolTest::AlterResourcePool [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> WebLoginServiceAudit::AuditLogLdapLoginSuccess [GOOD] >> TResourcePoolTest::SchemeErrors [GOOD] >> TTicketParserTest::BulkAuthorization [GOOD] >> S3Writer::WriteTableS3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:17.249631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:17.249761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:17.249856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:17.249894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:17.249930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:17.249984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:17.250026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:17.250054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:17.250163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:17.250249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:17.251185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:17.251529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:17.352647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:17.352706Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:17.367120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:17.367817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:17.368014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:17.380031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:17.380859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:17.381628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:17.381987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:17.386101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:17.386313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:17.387725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:17.387791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:17.387898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-02-19T08:21:17.387955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:17.388004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:17.388181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:17.554180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.558214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.558395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.558504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.558626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.558696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.558761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.558873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.558971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.559041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.559108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.559169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.559259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.559326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 057594046678944 2026-02-19T08:21:18.411360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-02-19T08:21:18.411547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> Done 2026-02-19T08:21:18.411728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:21:18.411790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:21:18.412450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:21:18.413760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:21:18.413891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:21:18.419666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:18.419708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:21:18.419889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:21:18.419980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:21:18.420089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:18.420157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 40 2026-02-19T08:21:18.420198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-02-19T08:21:18.420233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-02-19T08:21:18.420531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:21:18.420572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: 
[72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:21:18.420690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:21:18.420734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:21:18.420780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:21:18.420818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:21:18.420853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:21:18.420910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:21:18.420957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:21:18.420988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:21:18.421072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-02-19T08:21:18.421112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:21:18.421143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2026-02-19T08:21:18.421171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2026-02-19T08:21:18.422197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:21:18.422368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:21:18.422405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:21:18.422442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2026-02-19T08:21:18.422490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:21:18.423303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:21:18.423388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:21:18.423423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:21:18.423452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-02-19T08:21:18.423485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:21:18.423554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:21:18.427882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:21:18.428133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:21:18.428293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:21:18.428324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:21:18.428646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:21:18.428709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:21:18.428735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:746:2734] TestWaitNotification: OK eventTxId 102 2026-02-19T08:21:18.429068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:18.429281Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 193us result status StatusSuccess 2026-02-19T08:21:18.429623Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { Properties { key: "concurrent_query_limit" value: "10" } Properties { key: "query_cancel_after_seconds" value: "60" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists >> WebLoginServiceAudit::AuditLogLdapLoginBadPassword >> TTicketParserTest::NebiusAuthorization >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> KqpQuery::YqlTableSample >> DataShardWrite::UpsertImmediate [GOOD] >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> DataShardWrite::DeleteImmediate [GOOD] >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions >> DataShardVolatile::VolatileTxAbortedOnDrop >> DataShardWrite::UpsertImmediateManyColumns >> DataShardWrite::CancelImmediate >> TTicketParserTest::NebiusAuthenticationRetryError >> DataShardWrite::WriteImmediateSeveralOperations |92.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:17.230123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:17.230236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:17.230301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:17.230357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:17.230402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:17.230446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:17.230497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:17.230531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:17.230620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:17.230711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:17.231700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:17.232036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:17.333234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:17.333276Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:17.346022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:17.346848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:17.347068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2026-02-19T08:21:17.360681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:17.361384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:17.362188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:17.362472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:17.365850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:17.366067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:17.367070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:17.367131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:17.367244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:17.367298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:17.367356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:17.367531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:17.509466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.510677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.510809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.510913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.511820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... HEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:18.377955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:21:18.378107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:21:18.378195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:21:18.378355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:18.378407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:21:18.378477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 40 2026-02-19T08:21:18.378507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 41 2026-02-19T08:21:18.378908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:21:18.378954Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:21:18.379066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:21:18.379102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:21:18.379150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:21:18.379194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:21:18.379238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:21:18.379285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:21:18.379321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:21:18.379356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:21:18.379452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:21:18.379507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-02-19T08:21:18.379587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2026-02-19T08:21:18.379622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2026-02-19T08:21:18.379647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-02-19T08:21:18.380232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:18.380319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:18.380350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:21:18.380396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-02-19T08:21:18.380453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-02-19T08:21:18.380856Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:21:18.380904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:21:18.380973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:21:18.381317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:18.381392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:18.381424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:21:18.381453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-02-19T08:21:18.381494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:21:18.382267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:18.382340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:18.382380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:21:18.382410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2026-02-19T08:21:18.382443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:21:18.382524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:21:18.386174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:21:18.386355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 
72057594046678944 2026-02-19T08:21:18.386903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:21:18.387406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:21:18.387774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:21:18.387826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:21:18.388249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:21:18.388355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:21:18.388394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:777:2765] TestWaitNotification: OK eventTxId 103 2026-02-19T08:21:18.388860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:18.389138Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 294us result status StatusPathDoesNotExist 2026-02-19T08:21:18.389333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePoolTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:18.258142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:18.258249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:18.258320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:18.258387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:18.258442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:18.258492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:18.258523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:18.258548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:18.258647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:18.258723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:18.259681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:18.259995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:18.347102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:18.347159Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:18.362084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:18.362824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:18.363019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:18.375108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:18.375893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:18.376798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:18.377128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:18.380551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:18.380845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:18.382316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:18.382383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:18.382512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:18.382584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:18.382635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:18.382831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:18.541418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.542579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.542712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.542809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.542950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.543028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.543105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.543221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.543333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.543415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.543511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.543608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:21:18.543694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.543800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... HEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:19.326218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:21:19.326364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:21:19.326432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:21:19.326584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:19.326627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:21:19.326680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 40 2026-02-19T08:21:19.326720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 41 2026-02-19T08:21:19.326989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:21:19.327026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:21:19.327117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:21:19.327149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:21:19.327212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:21:19.327247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:21:19.327283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:21:19.327321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:21:19.327371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:21:19.327428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:21:19.327495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:21:19.327534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-02-19T08:21:19.327583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2026-02-19T08:21:19.327608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2026-02-19T08:21:19.327633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-02-19T08:21:19.328193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:19.328285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:19.328319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:21:19.328371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-02-19T08:21:19.328425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-02-19T08:21:19.328969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:21:19.329015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:21:19.329082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:21:19.329423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:19.329508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:19.329554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:21:19.329585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-02-19T08:21:19.329613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:21:19.330781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:19.330867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:21:19.330910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:21:19.330945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2026-02-19T08:21:19.330977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:21:19.331040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:21:19.334064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:21:19.334745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:21:19.334875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:21:19.335248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:21:19.335583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:21:19.335625Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:21:19.336015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:21:19.336115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:21:19.336166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:780:2768] TestWaitNotification: OK eventTxId 103 2026-02-19T08:21:19.336619Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:19.336876Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 251us result status StatusPathDoesNotExist 2026-02-19T08:21:19.337098Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:18.554850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:18.554966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:18.555029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, 
ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:18.555089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:18.555152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:18.555195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:18.555226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:18.555253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:18.555352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:18.555442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:18.556504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:18.556790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:18.650890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:18.650947Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:18.669705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:18.670926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:18.671129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:18.681643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:18.682554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:18.683468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:18.683858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:18.687780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:18.688009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:18.689419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:18.689481Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:18.689599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:18.689657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:18.689700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:18.689913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:18.849861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.850915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:18.851972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:21:19.675313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2026-02-19T08:21:19.675385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2026-02-19T08:21:19.675431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 124 2026-02-19T08:21:19.675500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 124, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-02-19T08:21:19.675534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:21:19.675622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 124, subscribers: 0 2026-02-19T08:21:19.678114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-02-19T08:21:19.686462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-02-19T08:21:19.686637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-02-19T08:21:19.686699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 TestModificationResult got TxId: 124, wait until txId: 124 TestWaitNotification wait txId: 124 2026-02-19T08:21:19.686949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 124: send EvNotifyTxCompletion 2026-02-19T08:21:19.686999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 124 2026-02-19T08:21:19.687470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-02-19T08:21:19.687610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-02-19T08:21:19.687661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:717:2705] 
TestWaitNotification: OK eventTxId 124 TestModificationResults wait txId: 125 2026-02-19T08:21:19.691439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:21:19.691750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /MyRoot/AnotherDir, operationId: 125:0, at schemeshard: 72057594046678944 2026-02-19T08:21:19.691903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:21:19.691968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 0 2026-02-19T08:21:19.692001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 125:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 41] source path: 2026-02-19T08:21:19.692063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:21:19.692107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 125:1, path# /MyRoot/AnotherDir/MyResourcePool 2026-02-19T08:21:19.692226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2026-02-19T08:21:19.693769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 125:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 2026-02-19T08:21:19.693988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:289: MkDir AbortPropose, opId: 125:0, at schemeshard: 72057594046678944 2026-02-19T08:21:19.696935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 125, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:19.697235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 
2026-02-19T08:21:19.703433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:21:19.703753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /MyRoot/.metadata/workload_manager/pools/AnotherDir, operationId: 126:0, at schemeshard: 72057594046678944 2026-02-19T08:21:19.703898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 40], parent name: pools, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-02-19T08:21:19.703986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 0 2026-02-19T08:21:19.704023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 126:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 42] source path: 2026-02-19T08:21:19.704105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:21:19.704164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 126:1, path# /MyRoot/.metadata/workload_manager/pools/AnotherDir/MyResourcePool 2026-02-19T08:21:19.704260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2026-02-19T08:21:19.705574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 126:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 72057594046678944 2026-02-19T08:21:19.705690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:289: MkDir AbortPropose, opId: 126:0, at schemeshard: 72057594046678944 2026-02-19T08:21:19.711709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:19.711933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 126, wait until txId: 126 
TestModificationResults wait txId: 127 2026-02-19T08:21:19.714912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "" } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:21:19.715119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 127:0, path# /MyRoot/.metadata/workload_manager/pools/ 2026-02-19T08:21:19.715292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-02-19T08:21:19.717811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/\', error: path part shouldn\'t be empty" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:19.718017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, operation: CREATE RESOURCE POOL, path: TestModificationResult got TxId: 127, wait until txId: 127 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_s3_writer/unittest >> S3Writer::WriteTableS3 [GOOD] Test command err: 2026-02-19T08:21:17.278839Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488510726027633:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:17.279149Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:17.344721Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:18.103681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:18.103803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:18.122394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:18.128807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:18.169906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488510726027519:2081] 1771489277254014 != 1771489277254017 2026-02-19T08:21:18.207675Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:18.276771Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:18.373837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:19.035316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:19.035338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:19.035345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:19.035467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:19.678264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:19.701037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:19.745188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) REQUEST: PUT /TEST/writer.AtufpxzetsqaVnEuozdXpD.json HTTP/1.1 HEADERS: Host: localhost:62293 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 091D3C19-BA17-4269-A4DE-7CB60450ED65 amz-sdk-request: attempt=1 content-length: 86 content-md5: pMYsVvGh+5u97Zv4QREMiA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/writer.AtufpxzetsqaVnEuozdXpD.json / / 86 REQUEST: PUT /TEST/part.1.AtufpxzetsqaVnEuozdXpD.jsonl HTTP/1.1 HEADERS: Host: localhost:62293 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 9414D6C0-4423-41F9-9722-96544208F167 amz-sdk-request: attempt=1 content-length: 111 content-md5: BjI3MWdGo2WgXhR5x59jqQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/part.1.AtufpxzetsqaVnEuozdXpD.jsonl / / 111 REQUEST: PUT /TEST/writer.AtufpxzetsqaVnEuozdXpD.json HTTP/1.1 HEADERS: Host: localhost:62293 Accept: */* Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAAQAAAAIAAAAA amz-sdk-invocation-id: 973C5B0C-D6E3-4D77-B7E8-44390F9CDA7F amz-sdk-request: attempt=1 content-length: 85 content-md5: 4FipNIGjnMvES2WURx6y5Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD Connection: Upgrade, HTTP2-Settings S3_MOCK::HttpServeWrite: /TEST/writer.AtufpxzetsqaVnEuozdXpD.json / / 85 ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:17.329345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:17.329444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:17.329511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:17.329567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:17.329676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:17.329802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:17.329836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:17.329862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:17.329951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:17.330025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:17.330897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:17.331193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:17.412825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:17.412916Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:17.426063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:17.430621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:17.430857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2026-02-19T08:21:17.445166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:17.445953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:17.446710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:17.447020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:17.453240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:17.453486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:17.454929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:17.454994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:17.455114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:17.455180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:17.455223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:17.455410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:17.622345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.623436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.623597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.623720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.623865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.623935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.624003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.624113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.624214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.624290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.624360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.624441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.624515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:17.624582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... meshard: 72057594046678944 2026-02-19T08:21:19.481983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2026-02-19T08:21:19.482159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:21:19.483037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:21:19.483144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:21:19.483184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:21:19.483227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:21:19.483270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-02-19T08:21:19.484369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:21:19.484454Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:21:19.484483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:21:19.484516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 2 2026-02-19T08:21:19.484568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-02-19T08:21:19.484643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2026-02-19T08:21:19.487207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2026-02-19T08:21:19.487361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:21:19.488761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2026-02-19T08:21:19.489739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:19.489902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:19.489962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:35: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2026-02-19T08:21:19.490155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 129:0 Propose -> Done 2026-02-19T08:21:19.490357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:21:19.490433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-02-19T08:21:19.490750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2026-02-19T08:21:19.492491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-02-19T08:21:19.492531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:19.492715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-02-19T08:21:19.492834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:19.492871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:896:2850], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-02-19T08:21:19.492934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:896:2850], at schemeshard: 72057594046678944, txId: 129, path id: 43 2026-02-19T08:21:19.493203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2026-02-19T08:21:19.493265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 129:0 ProgressState 2026-02-19T08:21:19.493371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-02-19T08:21:19.493411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:21:19.493452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-02-19T08:21:19.493488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:21:19.493550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-02-19T08:21:19.493597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:21:19.493636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-02-19T08:21:19.493669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 129:0 2026-02-19T08:21:19.493775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-02-19T08:21:19.493822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-02-19T08:21:19.493860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2026-02-19T08:21:19.493895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 43], 3 2026-02-19T08:21:19.494616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:21:19.494699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:21:19.494750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:21:19.494793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-02-19T08:21:19.494839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-02-19T08:21:19.495614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:21:19.495701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:21:19.495731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:21:19.495772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2026-02-19T08:21:19.495811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-02-19T08:21:19.495886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-02-19T08:21:19.500464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-02-19T08:21:19.500570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthenticationUnsupported |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} >> WebLoginServiceAudit::AuditLogAdminLoginSuccess [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> WebLoginServiceAudit::AuditLogLdapLoginBadBind >> DataShardWrite::IncrementImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:19.217581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:19.217652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:19.217730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:19.217776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:19.217805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:19.217841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:19.217867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:19.217884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:19.217965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:19.218017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:19.218655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:19.218872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:19.308109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:19.308178Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:19.324005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:19.327481Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:19.327977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:19.353817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:19.354667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:19.355394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:19.355746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:19.359117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:19.359297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:19.360591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:19.360660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:19.360776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:19.360819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:19.360859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:19.361025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:19.528829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.529788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.529896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:19.530875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
is done id#101:0 progress is 3/3 2026-02-19T08:21:22.370121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:21:22.370140Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-02-19T08:21:22.370156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:21:22.370179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2026-02-19T08:21:22.370213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:21:22.370259Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:21:22.370295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:21:22.370352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:21:22.370379Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-02-19T08:21:22.370394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:1 2026-02-19T08:21:22.370414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:21:22.370431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-02-19T08:21:22.370444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:2 2026-02-19T08:21:22.370464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:21:22.370484Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2026-02-19T08:21:22.370513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-02-19T08:21:22.370541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2026-02-19T08:21:22.370557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 5 2026-02-19T08:21:22.370576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-02-19T08:21:22.371565Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:22.371650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:22.371744Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:21:22.371790Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-02-19T08:21:22.371836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:21:22.372667Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:22.372725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:22.372754Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:21:22.372784Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-02-19T08:21:22.372815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:21:22.373170Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:22.373222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:22.373244Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:21:22.373272Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-02-19T08:21:22.373301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:21:22.374239Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:22.374301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:22.374333Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:21:22.374367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-02-19T08:21:22.374390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:21:22.374447Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:21:22.377100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:21:22.377172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:21:22.377273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:21:22.378201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:21:22.378461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:21:22.378512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:21:22.378921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:21:22.379028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:21:22.379069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:728:2717] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-02-19T08:21:22.381774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpAlterResourcePool CreateResourcePool { Name: "MyResourcePool" Properties { Properties { key: "concurrent_query_limit" value: "20" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:21:22.382019Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_resource_pool.cpp:123: [72057594046678944] TAlterResourcePool Propose: opId# 102:0, path# /MyRoot/.metadata/workload_manager/pools/MyResourcePool 2026-02-19T08:21:22.382218Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), at schemeshard: 72057594046678944 2026-02-19T08:21:22.390926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:22.391269Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), operation: ALTER RESOURCE POOL, path: MyResourcePool TestModificationResult got TxId: 102, wait until txId: 102 >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] >> KqpPg::TempTablesWithCache [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> KqpPg::TableDeleteWhere+useSink |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/statistics/ydb-core-kqp-ut-olap-statistics |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> StoragePool::TestDistributionRandomProbability [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView >> KqpQuery::UdfMemoryLimit >> KqpLimits::KqpMkqlMemoryLimitException >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/statistics/ydb-core-kqp-ut-olap-statistics |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |92.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/statistics/ydb-core-kqp-ut-olap-statistics |92.5%| [TA] $(B)/ydb/core/tx/replication/service/ut_s3_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest |92.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> KqpLimits::OutOfSpaceBulkUpsertFail >> WebLoginServiceAudit::AuditLogLdapLoginBadPassword [GOOD] >> WebLoginServiceAudit::AuditLogLdapLoginBadUser |92.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_s3_writer/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_s3_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExplain::SortStage [GOOD] >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> IndexBuildTest::CancellationNotEnoughRetriesUniq [GOOD] >> KqpExplain::SelfJoin3xSameLabels >> IndexBuildTest::CancellationNoTable >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:20:52.552164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:52.552261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:52.552334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:52.552372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:52.552409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:52.552482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:52.552531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:52.552581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:52.552647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-02-19T08:20:52.552721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:52.553581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:52.553967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:52.647963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:20:52.648013Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:52.659691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:52.660836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:52.661051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:52.674737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:52.675949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:52.676825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:52.677144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:52.680421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.680589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:52.681684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:52.681762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:52.681869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:52.681912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:52.681960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:52.682163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:20:52.834161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.835943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.836019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.836104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.836203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:20:52.836275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
rementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:21:21.318583Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:21:21.318769Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.318877Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.318955Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.319062Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.319146Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.319288Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.319614Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.319750Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.320196Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.320289Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.320516Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.320690Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.320743Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.320841Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.321397Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.321501Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.321934Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.322225Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.322305Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.322362Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.322580Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.322643Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.322694Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:21:21.332942Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:21.363727Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:21.363835Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:21.363995Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:21.364055Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:21.364106Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:21.372152Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [4:788:2754] sender: [4:847:2058] recipient: [4:15:2062] 2026-02-19T08:21:21.416601Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:21.416671Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:121: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-02-19T08:21:21.584058Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-02-19T08:21:21.584271Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:21.584350Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:21.584621Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:21.584676Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:838:2792], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-02-19T08:21:21.585406Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 0 2026-02-19T08:21:23.590385Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:59: 
TTxLogin Execute at schemeshard: 72057594046678944 2026-02-19T08:21:23.598121Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-02-19T08:21:23.619010Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:116: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-02-19T08:21:23.619206Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-02-19T08:21:23.620055Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:23.620333Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 381us result status StatusSuccess 2026-02-19T08:21:23.621092Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoUDZdsRrcPx0RU6aKfYD\n8HYoG8iUSVJtfycJTHJU6dIcOiCGsxsdwDP53vOqQtcaOR+vSzC4EesfOnb/GMlq\ngFjZTDzjaYy4AVWIKh3SJCMGIA1nfN3rkwFM74Mwxf1fH4JN6Iny3JXBLz1+9e/E\ndQBEysALQar4oW9Q7o7i5l0cKHkuqyakM8+9jMXelwCBPdedGkko+xvZ7/Wa+BHW\nxXJ7IPSzcvdxw4eYTfaCxshvugmEmE+BE7x68dhA1+UULlbEftPZU3BNoi666Bt6\nWuNo617D8eg55XAXMwhr82GwmMjahVXyHmDwCDpO/ocFdN1wQN2DnTHwN4mMA1dn\njwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1771575678703 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1+TrLrAhOddbtLoe7tbp\nAtm0sl97z3EKdhbobEWRFjxkSpxKFoqPd0hLYJCVRUG8ImYL5XH1LzAgp3+gtjjd\n+6ksnyMWk7xsxlREqavi6kayhKBuwEZ9Dpo+wYnPOUiEA4C1n6MEIUM0D9AJzFwN\nBm1pB+1FQFluVuqcLduQrHGuxkhQK7QmvqJAEa2EJ2sXCU/ZHS7qQj0avgIeoNTw\nVresG+UMxyLa/eQR3mLN00gxe1fMKBK0Dwo/nBVrN5AIFS54isjlcwoqbMWqn0US\nnPBvau9nHrBCw4b1lIOjlXDrgTxCwRcNtBPCjlIcTKC8ejq11uf8U/IiJm/i/SMN\nuwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1771575679132 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArL529VIZaz9vEAXsd+1p\nw+Kf9lI9fpnvLCGAdWREWI2oea4IU0ozYl1y5+5fJ2ZaC8FMz7yyxr1PlO8RYIYB\n8/mEYP5WABDHP95fTWT5HSeQoOobaPRZXL1BOKakljoc3PnNknVeXTg3+lfpRXEi\nWQYzCuz+fnRaR0KPSYd9KxD7qiePVcbGNeCQQ9yop+ZfGCgeRz7y9I20mo3lIypJ\ndz+Uq8Sgfsz20T6BchMvSP0GL/IoAtxq0HSsf7J4dcn1UhcdN/0me6P1Uyq+59/0\n3UgKMgY61nl4JFxwxsnJdlt7KYvsQL3jpQISx9TidZovqqykuZ8C48q2fwi89/ey\nYwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1771575681578 } Sids { Name: "user1" Type: USER HashesInitParams { HashType: Argon InitParams: "vAznSFf9JltUvM3DkxJN/w==" } } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::QueryTimeout [GOOD] >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> KqpQuery::RandomNumber >> TTicketParserTest::AuthenticationUnsupported [GOOD] |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> TTicketParserTest::AuthenticationUnknown >> DataShardWrite::UpdateImmediate >> IndexBuildTest::CancellationNoTable [GOOD] >> IndexBuildTest::CancellationNoTableUniq |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |92.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |92.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:26.170855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:26.170994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:26.171062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:26.171110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:26.171144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:26.171178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:26.171204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:26.171228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:26.171291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:26.171368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:26.172177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:26.172446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:26.254599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:26.254660Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:26.266412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:26.267075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:26.267283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2026-02-19T08:21:26.278426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:26.279201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:26.279935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:26.280294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:26.283414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:26.283647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:26.284821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:26.284878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:26.285006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:26.285051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:26.285087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:26.285251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:26.427540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.428541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.428644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.428724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.428797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.428856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.428959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.429022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.429107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.429174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.429253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.429314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.429374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:26.429434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... EBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:21:27.231410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:27.231451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:27.231614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:21:27.231701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:27.231728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:21:27.231760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:21:27.232097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:21:27.232157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:21:27.232228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 
2026-02-19T08:21:27.232251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:21:27.232290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:21:27.232313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:21:27.232338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:21:27.232424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:21:27.232457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:21:27.232489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:21:27.232561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:21:27.232591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:21:27.232639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-02-19T08:21:27.232674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-02-19T08:21:27.233352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:27.233463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:27.233504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:21:27.233543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-02-19T08:21:27.233583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:21:27.234399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:21:27.234479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 
2026-02-19T08:21:27.234523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:21:27.234553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:21:27.234582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:21:27.234641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:21:27.237002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:21:27.238129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2026-02-19T08:21:27.238476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:21:27.238528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-02-19T08:21:27.238637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:21:27.238660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-02-19T08:21:27.238712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:21:27.238730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:21:27.239271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:21:27.239392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:21:27.239450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:707:2695] 2026-02-19T08:21:27.239723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:21:27.239839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:21:27.239893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:21:27.239919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:707:2695] 2026-02-19T08:21:27.240018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:21:27.240043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:707:2695] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-02-19T08:21:27.240526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:27.240736Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 235us result status StatusSuccess 2026-02-19T08:21:27.241140Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> 
KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:21:16.167354Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:16.274995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:16.297753Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:16.297844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:16.298533Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:16.579516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:16.579683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:16.664577Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489273053706 != 1771489273053710 2026-02-19T08:21:16.673743Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:16.720138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:16.807443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:17.112213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:17.127086Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:17.240776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.548293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:959:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.548395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:970:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.548450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.549277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:975:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.549378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.554586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:17.702579Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:973:2824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:21:17.791281Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1031:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink-UseStreamIndex [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink-UseStreamIndex >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable >> DataShardWrite::IncrementImmediate [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork >> IndexBuildTest::CancellationNoTableUniq [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |92.6%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> KqpQuery::YqlTableSample [GOOD] >> KqpQueryDiscard::DiscardSelectSupport |92.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |92.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:21:16.402923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:16.524286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:16.546757Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:16.546840Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:16.547416Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:16.845545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:16.845705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:16.942879Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489273367354 != 1771489273367358 2026-02-19T08:21:16.956266Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:17.005104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:17.115691Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:17.436716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:17.450997Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:17.567326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.962627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1006:2853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.962807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1017:2858], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.962904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.964159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1022:2863], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.964357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.969407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:18.105350Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1020:2861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:21:18.194898Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1079:2901] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} >> WebLoginServiceAudit::AuditLogLdapLoginBadBind [GOOD] >> WebLoginServiceAudit::AuditLogCreateModifyUser |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2026-02-19T08:21:02.771268Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488445545244070:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:02.771469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:03.136245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:03.177899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:03.178007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:03.232977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:03.234394Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.235434Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488445545244038:2081] 1771489262768473 != 1771489262768476 2026-02-19T08:21:03.343447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:03.343473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:03.343486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:03.343555Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:03.432792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:03.554007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:03.564517Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket 8EF687A2CB65F225DA29CDE80BB5C5B54D358554D14A39FFD40E7EDCF6BC6B99 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-02-19T08:21:06.569641Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488464609345160:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:06.569703Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:06.585268Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:06.660213Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:06.661675Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488464609345130:2081] 1771489266568804 != 1771489266568807 2026-02-19T08:21:06.683446Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:06.683513Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:06.696943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:06.746459Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:06.746484Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:06.746491Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:06.746573Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:06.858763Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:07.003682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:07.008613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:07.014930Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket 09F48ABEE3467DF554E20336196C914853522B5E5014D339EC58348BF4C03ED5 () has now permanent error message 'Cannot create token from certificate. 
Client`s certificate and server`s certificate have different issuers' 2026-02-19T08:21:07.015695Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket 09F48ABEE3467DF554E20336196C914853522B5E5014D339EC58348BF4C03ED5: Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers 2026-02-19T08:21:10.707860Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608488483516209185:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:10.707911Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:10.720901Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:10.811423Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:10.813364Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608488483516209155:2081] 1771489270706743 != 1771489270706746 2026-02-19T08:21:10.826251Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:10.826308Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:10.827755Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:10.863840Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:10.863860Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:10.863867Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:10.863931Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:10.976779Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:11.049462Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:11.055784Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket 28D2CE5184AE037406BC8ADB5C8DB9C87AC8EBB177A188003C39476D6A769172 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-02-19T08:21:11.056401Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket 28D2CE5184AE037406BC8ADB5C8DB9C87AC8EBB177A188003C39476D6A769172: Cannot create token from certificate. 
Client certificate failed verification 2026-02-19T08:21:14.718777Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488500796379605:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:14.728395Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:14.736950Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:14.830573Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:14.833254Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608488500796379485:2081] 1771489274709760 != 1771489274709763 2026-02-19T08:21:14.864020Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:14.864104Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:14.875195Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:14.946422Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:15.082490Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:15.082512Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:15.082519Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:15.082602Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:15.491001Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:15.502113Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:21:15.509974Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket 5FE0B39FE200DA5A1B3BCCFAD7997C604506A7192D70F8126FC4188F08177719 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-02-19T08:21:15.716991Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:19.306696Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608488522705137606:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:19.306734Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:19.401219Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:19.510281Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:19.510386Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:19.510673Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:19.525844Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608488522705137576:2081] 1771489279305630 != 1771489279305633 2026-02-19T08:21:19.542597Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:19.682674Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:19.682705Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:19.682713Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:19.682801Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:19.689806Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:19.967112Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:19.979470Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:19.983173Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket 4E41B45E43FA1CBD020301C68DE57FCCE8C4ECA1BB29D7F6B54E295B15C1721D () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-02-19T08:21:19.983883Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket 4E41B45E43FA1CBD020301C68DE57FCCE8C4ECA1BB29D7F6B54E295B15C1721D: Cannot create token from certificate. 
Client certificate failed verification 2026-02-19T08:21:24.211456Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7608488540143637135:2067];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:24.211509Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:24.326990Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:24.574600Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608488540143637108:2081] 1771489284201817 != 1771489284201820 2026-02-19T08:21:24.626914Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:24.630174Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:24.644944Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:24.649800Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:24.708604Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:24.890541Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:24.890565Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:24.890574Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:24.890658Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:25.206455Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:25.251443Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:25.274896Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:25.285592Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket 7296D4BDE27C1CAA71AE752DF3D66500CC564B05C4E8352A9433A284F748F99C () has now permanent error message 'Cannot create token from certificate. 
Client certificate failed verification' 2026-02-19T08:21:25.286357Z node 6 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket 7296D4BDE27C1CAA71AE752DF3D66500CC564B05C4E8352A9433A284F748F99C: Cannot create token from certificate. Client certificate failed verification |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex |92.6%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> WebLoginServiceAudit::AuditLogLdapLoginBadUser [GOOD] >> WebLoginServiceAudit::AuditLogLogout >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::CancelBuild >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization |92.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::UpsertPrepared+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:29.944342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:29.944474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:29.944555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:29.944621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:29.944665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:29.944708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:29.944744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:29.944788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:29.944860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:29.944944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:29.945861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:29.946193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:30.042320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:30.042395Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:30.056521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:30.057380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 
2026-02-19T08:21:30.057607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:30.070692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:30.071489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:30.072338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:30.072706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:30.076238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:30.076443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:30.077739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:30.077818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:30.077945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:30.077995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:30.078039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:30.078253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:30.323882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.324771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.324896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.324951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.324992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:30.325529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... BE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:32.084279Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex" took 275us result status StatusSuccess 2026-02-19T08:21:32.085318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } 
} PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:32.086115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:1496:3320], Recipient [1:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:21:32.086185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:21:32.086301Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:32.086669Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" took 349us result status StatusSuccess 2026-02-19T08:21:32.087628Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "19700101000000Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 49 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::AuthorizationUnavailable >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TDataShardTrace::TestTraceDistributedSelect >> TDataShardTrace::TestTraceDistributedUpsert-UseSink >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] |92.6%| [LD] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> TTicketParserTest::NebiusAuthorizationModify >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::SqlIn |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |92.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> DataShardWrite::UncommittedUpdateLockMissingRow >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:21:32.024225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:32.024310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:32.024392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:32.024465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:32.024497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:32.024533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:32.024571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:32.024613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:32.024676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:32.024748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:32.025547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2026-02-19T08:21:32.025857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:32.104803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:21:32.104868Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:32.129038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:32.129841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:32.130026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:32.144346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:32.145130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:32.145881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:32.146238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:32.149733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:32.149933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:32.151201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:32.151262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:32.151394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:32.151437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:32.151475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:32.151675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:32.279468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.280359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.280505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.280596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.280666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.280730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.280817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.280897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.280975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.281040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.281103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.281214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.281299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:32.281386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
s: true ShowPrivateTable: true } 2026-02-19T08:21:33.641703Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:21:33.641849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:33.642102Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex" took 243us result status StatusSuccess 2026-02-19T08:21:33.642824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndexCopy/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Index impl table name: indexImplTable 2026-02-19T08:21:33.643385Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:1068:3000], Recipient [1:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:21:33.643442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:21:33.643553Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:33.643927Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" took 325us result status StatusSuccess 2026-02-19T08:21:33.644590Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginCheckRemovedUser >> KqpScan::Limit >> KqpSplit::AfterResult+Ascending >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> KqpScan::LeftSemiJoinSimple >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |92.6%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> KqpStats::SysViewCancelled [GOOD] >> KqpTypes::DyNumberCompare >> KqpSplit::ChoosePartition+Ascending >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> WebLoginServiceAudit::AuditLogCreateModifyUser [GOOD] |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |92.7%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TTicketParserTest::AuthorizationUnavailable [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> TTicketParserTest::NebiusAuthorizationModify [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink-UseStreamIndex [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink+UseStreamIndex [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink+UseStreamIndex >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> IndexBuildTest::CancelBuild [GOOD] >> IndexBuildTest::CancelBuildUniq >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite >> WebLoginServiceAudit::AuditLogLogout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> WebLoginServiceAudit::AuditLogCreateModifyUser [GOOD] Test command err: 2026-02-19T08:21:02.736606Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488447796618811:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:02.736658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:03.173382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:03.173541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:03.220138Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:03.253350Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488447796618781:2081] 1771489262734965 != 1771489262734968 2026-02-19T08:21:03.263866Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.264209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:03.375011Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:03.375029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:03.375037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:03.375144Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:03.443031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:03.600486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:03.608740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:03.612471Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket A7338B3A830D36CE97814A8591F37BB77554A2E8A7AAF3BED73B5770E839F696 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-02-19T08:21:03.752051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:07.022843Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488467516253083:2144];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:07.023013Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:07.047235Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:07.147826Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:07.149971Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488467516252977:2081] 1771489267015055 != 1771489267015058 2026-02-19T08:21:07.159133Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:07.159189Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:07.187332Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:07.193788Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:07.259113Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-02-19T08:21:07.259139Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:07.259147Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:07.259269Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:07.464643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:07.471757Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket 5EB1C985050B19FC2EFAF168AB2FDDA741DD849495FDFE28E70A827B614D5692 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-02-19T08:21:11.413408Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608488486162163726:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:11.413460Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:11.440700Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:11.537364Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:11.554612Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:11.554688Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:11.572729Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:11.638300Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:11.638326Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:11.638334Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:11.638416Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:11.665786Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:11.827333Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:12.423708Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:15.074090Z node 3 :HTTP WARN: login_page.cpp:102: 127.0.0.1:0 POST /login AUDIT LOG buffer(39): 2026-02-19T08:21:11.582958Z: component=schemeshard, tx_id=281474976710657, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/.sys], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.583241Z: component=schemeshard, tx_id=281474976710658, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.583390Z: component=schemeshard, tx_id=281474976710659, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_effective_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.583506Z: component=schemeshard, tx_id=281474976710660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_users], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.583640Z: component=schemeshard, tx_id=281474976710661, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_tables], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.583746Z: component=schemeshard, tx_id=281474976710662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_pdisks], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.583860Z: component=schemeshard, tx_id=281474976710663, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_group_members], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.583968Z: component=schemeshard, tx_id=281474976710664, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.585838Z: component=schemeshard, tx_id=281474976710665, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.586062Z: component=schemeshard, tx_id=281474976710666, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_vslots], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.586191Z: component=schemeshard, tx_id=281474976710667, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:11.586311Z: component=schemeshard, tx_id=281474976710668, remote_address={none}, subject=metadata@ ... 
none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_effective_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.815929Z: component=schemeshard, tx_id=281474976715660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_users], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.816078Z: component=schemeshard, tx_id=281474976715661, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_tables], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.816478Z: component=schemeshard, tx_id=281474976715662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_pdisks], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.816628Z: component=schemeshard, tx_id=281474976715663, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_group_members], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.816792Z: component=schemeshard, tx_id=281474976715664, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.816946Z: component=schemeshard, tx_id=281474976715665, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.817092Z: component=schemeshard, tx_id=281474976715666, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_vslots], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.817276Z: component=schemeshard, tx_id=281474976715667, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.817458Z: component=schemeshard, tx_id=281474976715668, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/nodes], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.817598Z: component=schemeshard, tx_id=281474976715669, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.817755Z: component=schemeshard, tx_id=281474976715670, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.817990Z: component=schemeshard, tx_id=281474976715671, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_minute], status=SUCCESS, detailed_status=StatusAccepted 
2026-02-19T08:21:31.818146Z: component=schemeshard, tx_id=281474976715672, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/streaming_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.818292Z: component=schemeshard, tx_id=281474976715673, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/hive_tablets], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.818471Z: component=schemeshard, tx_id=281474976715674, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.818629Z: component=schemeshard, tx_id=281474976715675, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.818797Z: component=schemeshard, tx_id=281474976715676, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pool_classifiers], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.818966Z: component=schemeshard, tx_id=281474976715677, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/compile_cache_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.819706Z: component=schemeshard, tx_id=281474976715678, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_owners], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.819892Z: component=schemeshard, tx_id=281474976715679, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_sessions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.820022Z: component=schemeshard, tx_id=281474976715680, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.820251Z: component=schemeshard, tx_id=281474976715681, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_class], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.820384Z: component=schemeshard, tx_id=281474976715682, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/tables], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.820508Z: component=schemeshard, tx_id=281474976715683, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.820635Z: component=schemeshard, tx_id=281474976715684, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, 
paths=[/Root/.sys/top_queries_by_request_units_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.820760Z: component=schemeshard, tx_id=281474976715685, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/partition_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.820891Z: component=schemeshard, tx_id=281474976715686, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.821019Z: component=schemeshard, tx_id=281474976715687, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.821131Z: component=schemeshard, tx_id=281474976715688, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.821261Z: component=schemeshard, tx_id=281474976715689, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_metrics_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.821383Z: component=schemeshard, tx_id=281474976715690, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.821508Z: component=schemeshard, tx_id=281474976715691, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:31.821628Z: component=schemeshard, tx_id=281474976715692, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:32.198553Z: component=schemeshard, tx_id=281474976710657, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//Root], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:35.747438Z: component=schemeshard, tx_id=281474976710658, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE USER, paths=[/Root], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-02-19T08:21:35.876514Z: component=schemeshard, tx_id=281474976710659, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=MODIFY USER, paths=[/Root], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-02-19T08:21:35.918254Z: component=schemeshard, tx_id=281474976710660, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=MODIFY USER, paths=[/Root], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2026-02-19T08:21:35.955232Z: component=schemeshard, tx_id=281474976710661, 
remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=MODIFY USER, paths=[/Root], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2026-02-19T08:21:35.998219Z: component=schemeshard, tx_id=281474976710662, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=MODIFY USER, paths=[/Root], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-02-19T08:21:36.082112Z: component=schemeshard, tx_id=281474976710663, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=MODIFY USER, paths=[/Root], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] AUDIT LOG checked line: 2026-02-19T08:21:36.082112Z: component=schemeshard, tx_id=281474976710663, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=MODIFY USER, paths=[/Root], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2026-02-19T08:21:02.587918Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488448061300239:2143];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:02.596892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:02.644891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:02.918169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:02.918268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:02.962152Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:02.979580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:03.082872Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.085848Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488448061300135:2081] 1771489262578414 != 1771489262578417 2026-02-19T08:21:03.184278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:03.221253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:03.221272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2026-02-19T08:21:03.221278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:03.221336Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:03.440561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:03.449371Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-02-19T08:21:03.449438Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc655ef1250] Connect to grpc://localhost:1959 2026-02-19T08:21:03.453044Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc655ef1250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-02-19T08:21:03.469023Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc655ef1250] Status 14 Service Unavailable 2026-02-19T08:21:03.469456Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:21:03.469501Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:03.469554Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-02-19T08:21:03.469828Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc655ef1250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-02-19T08:21:03.472796Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc655ef1250] Status 14 Service Unavailable 2026-02-19T08:21:03.473084Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:21:03.473110Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:03.586125Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:04.600889Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket **** (8E120919) 2026-02-19T08:21:04.600991Z node 1 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-02-19T08:21:04.601284Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc655ef1250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-02-19T08:21:04.604257Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc655ef1250] Status 14 Service Unavailable 2026-02-19T08:21:04.604392Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:21:04.604417Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:05.601323Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket **** (8E120919) 2026-02-19T08:21:05.601394Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-02-19T08:21:05.601760Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc655ef1250] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-02-19T08:21:05.604845Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc655ef1250] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-02-19T08:21:05.605068Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:07.583555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488448061300239:2143];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:07.583678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:16.376571Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488506957727067:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:16.376633Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:16.389785Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-02-19T08:21:16.450363Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:16.453096Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488506957727023:2081] 1771489276375341 != 1771489276375344 2026-02-19T08:21:16.492663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:16.492743Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:16.505067Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:16.619951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:16.642662Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:16.642705Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:16.642713Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:16.642793Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:16.844571Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:16.854376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:16.856962Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-02-19T08:21:16.857033Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc655f5bad0] Connect to grpc://localhost:2268 2026-02-19T08:21:16.8582 ... 
ERMISSION_DENIED' 2026-02-19T08:21:29.563245Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:29.563359Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:29.567386Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:29.567471Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:29.569036Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:29.574090Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608488565550426247:2081] 1771489289393079 != 1771489289393082 2026-02-19T08:21:29.646619Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:21:29.646959Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:21:29.667994Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:29.670409Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:29.670447Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:29.670454Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:29.670541Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:29.857848Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:29.958079Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:29.966652Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:29.978052Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-02-19T08:21:29.978122Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc655f7cb50] Connect to grpc://localhost:27083 2026-02-19T08:21:29.979235Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc655f7cb50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" 
resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-02-19T08:21:29.987175Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc655f7cb50] Status 14 Service Unavailable 2026-02-19T08:21:29.987594Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:21:29.987632Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:21:29.987661Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:29.987722Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-02-19T08:21:29.988068Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc655f7cb50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2026-02-19T08:21:29.990545Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc655f7cb50] Status 1 CANCELLED 2026-02-19T08:21:29.992623Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2026-02-19T08:21:29.992658Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2026-02-19T08:21:29.992691Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2026-02-19T08:21:34.229479Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:34.231925Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:34.337800Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:34.338792Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608488584664924778:2081] 1771489294192510 != 1771489294192513 2026-02-19T08:21:34.352550Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:34.352667Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-02-19T08:21:34.380534Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:34.420943Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:34.447301Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:34.447332Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:34.447340Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:34.447473Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:34.735439Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:34.747519Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:34.758053Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-02-19T08:21:34.758139Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc655f73bd0] Connect to grpc://localhost:1041 2026-02-19T08:21:34.759312Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc655f73bd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-02-19T08:21:34.782578Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc655f73bd0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-02-19T08:21:34.783207Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:34.784015Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:538: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-02-19T08:21:34.784401Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc655f73bd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } 
managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-02-19T08:21:34.792783Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc655f73bd0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-02-19T08:21:34.794603Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> DataShardWrite::UncommittedUpdateLockMissingRow [GOOD] >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> WebLoginServiceAudit::AuditLogLogout [GOOD] Test command err: 2026-02-19T08:21:02.863585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488449549585906:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:02.863940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:03.372131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:03.403867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:03.404006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:03.433380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:03.475895Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.645141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:03.645163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:03.645171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:03.645268Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:03.693759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:03.862094Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:03.883944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:07.157494Z node 1 :HTTP WARN: login_page.cpp:102: 127.0.0.1:0 POST /login AUDIT LOG buffer(39): 2026-02-19T08:21:03.518628Z: component=schemeshard, tx_id=281474976710657, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/.sys], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.519190Z: component=schemeshard, tx_id=281474976710658, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.519413Z: component=schemeshard, tx_id=281474976710659, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_effective_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.519593Z: component=schemeshard, tx_id=281474976710660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_users], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.519747Z: component=schemeshard, tx_id=281474976710661, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_tables], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.519848Z: component=schemeshard, tx_id=281474976710662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_pdisks], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.519983Z: component=schemeshard, tx_id=281474976710663, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_group_members], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.520078Z: component=schemeshard, tx_id=281474976710664, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.520189Z: component=schemeshard, tx_id=281474976710665, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.520315Z: component=schemeshard, tx_id=281474976710666, 
remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_vslots], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.520413Z: component=schemeshard, tx_id=281474976710667, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.520555Z: component=schemeshard, tx_id=281474976710668, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/nodes], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.520686Z: component=schemeshard, tx_id=281474976710669, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.520822Z: component=schemeshard, tx_id=281474976710670, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.520949Z: component=schemeshard, tx_id=281474976710671, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.521092Z: component=schemeshard, tx_id=281474976710672, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/streaming_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.521219Z: component=schemeshard, tx_id=281474976710673, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/hive_tablets], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.521370Z: component=schemeshard, tx_id=281474976710674, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.521615Z: component=schemeshard, tx_id=281474976710675, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.521765Z: component=schemeshard, tx_id=281474976710676, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pool_classifiers], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.521890Z: component=schemeshard, tx_id=281474976710677, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/compile_cache_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.522011Z: component=schemeshard, tx_id=281474976710678, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_owners], 
status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.522168Z: component=schemeshard, tx_id=281474976710679, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_sessions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.522308Z: component=schemeshard, tx_id=281474976710680, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.522413Z: component=schemeshard, tx_id=281474976710681, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_class], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.522572Z: component=schemeshard, tx_id=281474976710682, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/tables], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.522722Z: component=schemeshard, tx_id=281474976710683, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.522835Z: component=schemeshard, tx_id=281474976710684, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.522968Z: component=schemeshard, tx_id=281474976710685, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/partition_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.523112Z: component=schemeshard, tx_id=281474976710686, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.523220Z: component=schemeshard, tx_id=281474976710687, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.523318Z: component=schemeshard, tx_id=281474976710688, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:03.523435Z: component=schemeshard, tx_id=281474976710689, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, p ... 
81491Z node 26 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket **** (589A015B): Token is not in correct format 2026-02-19T08:21:37.981628Z node 26 :HTTP ERROR: login_page.cpp:326: Logout: Token is not in correct format 2026-02-19T08:21:37.982337Z node 26 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout AUDIT LOG buffer(40): 2026-02-19T08:21:33.850725Z: component=schemeshard, tx_id=281474976710657, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/.sys], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.851086Z: component=schemeshard, tx_id=281474976710658, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.851331Z: component=schemeshard, tx_id=281474976710659, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_effective_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.851508Z: component=schemeshard, tx_id=281474976710660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_users], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.851683Z: component=schemeshard, tx_id=281474976710661, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_tables], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.851843Z: component=schemeshard, tx_id=281474976710662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_pdisks], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.852030Z: component=schemeshard, tx_id=281474976710663, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_group_members], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.852199Z: component=schemeshard, tx_id=281474976710664, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.852365Z: component=schemeshard, tx_id=281474976710665, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.861842Z: component=schemeshard, tx_id=281474976710666, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_vslots], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.862191Z: component=schemeshard, tx_id=281474976710667, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.862359Z: component=schemeshard, tx_id=281474976710668, remote_address={none}, subject=metadata@system, sanitized_token={none}, 
database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/nodes], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.862509Z: component=schemeshard, tx_id=281474976710669, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.862661Z: component=schemeshard, tx_id=281474976710670, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.862806Z: component=schemeshard, tx_id=281474976710671, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.862975Z: component=schemeshard, tx_id=281474976710672, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/streaming_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.863123Z: component=schemeshard, tx_id=281474976710673, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/hive_tablets], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.863261Z: component=schemeshard, tx_id=281474976710674, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.863396Z: component=schemeshard, tx_id=281474976710675, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.863537Z: component=schemeshard, tx_id=281474976710676, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pool_classifiers], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.863687Z: component=schemeshard, tx_id=281474976710677, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/compile_cache_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.863855Z: component=schemeshard, tx_id=281474976710678, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_owners], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.864013Z: component=schemeshard, tx_id=281474976710679, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_sessions], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.864163Z: component=schemeshard, tx_id=281474976710680, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_minute], status=SUCCESS, detailed_status=StatusAccepted 
2026-02-19T08:21:33.864312Z: component=schemeshard, tx_id=281474976710681, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_class], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.864453Z: component=schemeshard, tx_id=281474976710682, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/tables], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.864596Z: component=schemeshard, tx_id=281474976710683, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.864736Z: component=schemeshard, tx_id=281474976710684, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.864875Z: component=schemeshard, tx_id=281474976710685, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/partition_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.865007Z: component=schemeshard, tx_id=281474976710686, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.865139Z: component=schemeshard, tx_id=281474976710687, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.865274Z: component=schemeshard, tx_id=281474976710688, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.865413Z: component=schemeshard, tx_id=281474976710689, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_metrics_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.865545Z: component=schemeshard, tx_id=281474976710690, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.869629Z: component=schemeshard, tx_id=281474976710691, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:33.869914Z: component=schemeshard, tx_id=281474976710692, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:34.404390Z: component=schemeshard, tx_id=281474976715657, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, 
paths=[//Root], status=SUCCESS, detailed_status=StatusAccepted 2026-02-19T08:21:37.782177Z: component=schemeshard, tx_id=281474976715658, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE USER, paths=[/Root], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-02-19T08:21:37.978879Z: component=grpc-login, remote_address=localhost, database=/Root, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9Sb290Il0sImV4cCI6MTc3MTUzMjQ5NywiaWF0IjoxNzcxNDg5Mjk3LCJzdWIiOiJ1c2VyMSJ9.**, login_user_level=admin 2026-02-19T08:21:37.984677Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9Sb290Il0sImV4cCI6MTc3MTUzMjQ5NywiaWF0IjoxNzcxNDg5Mjk3LCJzdWIiOiJ1c2VyMSJ9.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2026-02-19T08:21:37.984677Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9Sb290Il0sImV4cCI6MTc3MTUzMjQ5NywiaWF0IjoxNzcxNDg5Mjk3LCJzdWIiOiJ1c2VyMSJ9.**, operation=LOGOUT, status=SUCCESS |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |92.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:21:36.975906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:37.120648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:37.142816Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:37.142897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:37.143415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:37.531073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:37.531216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:37.618345Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489294029598 != 1771489294029602 2026-02-19T08:21:37.632799Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:37.681973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:37.774817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:38.103859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:38.117293Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:38.230924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> GroupWriteTest::TwoTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:21:36.764571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:36.854319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:36.873673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:36.873784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:36.874311Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:37.171690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:37.171804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:37.244626Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489293982051 != 1771489293982055 2026-02-19T08:21:37.257841Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:37.306928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:37.408147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:37.746840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:37.760978Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:37.886919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.065159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1135:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.065311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2971], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.065397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.066791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1150:2975], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.066968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.072826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:40.103501Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:21:40.273658Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1149:2974], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:21:40.387476Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1213:3018] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> KqpQueryDiscard::DiscardSelectSupport [GOOD] >> KqpQueryDiscard::DiscardSelectEnsureExecuted >> TTicketParserTest::AuthorizationModify [GOOD] >> KqpExplain::SqlIn [GOOD] >> KqpExplain::ReadTableRangesFullScan |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |92.7%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> KqpScan::Limit [GOOD] >> KqpScan::LimitOverSecondaryIndexRead >> TTicketParserTest::BulkAuthorizationModify [GOOD] |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:21:36.865398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:37.077174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:37.097225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:37.097303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:37.097950Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:37.455718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:37.455856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:37.554322Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489293951995 != 1771489293951999 2026-02-19T08:21:37.564635Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:37.611143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:37.707347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:38.165458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:38.181763Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:38.308563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.522629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1135:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.522784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2971], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.522873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.523977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1150:2975], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.524153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.529228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:40.557639Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:21:40.721837Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1149:2974], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:21:40.809166Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1213:3018] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:21:37.023882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:37.133128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:37.151124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:37.151209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:37.151809Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:37.453772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:37.453948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:37.523444Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489293828847 != 1771489293828851 2026-02-19T08:21:37.532767Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:37.586972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:37.681379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:38.007177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:38.022440Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:38.139597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.529240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1135:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.529386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2971], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.529480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.538915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1150:2975], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.539160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.544140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:40.591225Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:21:40.770618Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1149:2974], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:21:40.899243Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1213:3018] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> GroupWriteTest::ByTableName >> KqpSplit::AfterResult+Ascending [GOOD] >> KqpSplit::AfterResult+Descending >> KqpScan::LeftSemiJoinSimple [GOOD] >> KqpScan::JoinWithParams |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2026-02-19T08:21:02.218832Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488445802624821:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:02.218993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:02.494318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:02.494476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:02.531183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:02.552622Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488445802624791:2081] 1771489262216271 != 1771489262216274 2026-02-19T08:21:02.568793Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:02.575073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:02.672445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:02.672469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:02.672483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:02.672592Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:02.826049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:02.933412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:02.943248Z node 1 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:21:02.943324Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc66c700050] Connect to grpc://localhost:14247 2026-02-19T08:21:02.948566Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c700050] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-02-19T08:21:02.965686Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c700050] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:02.967332Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1069: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-02-19T08:21:02.969195Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc66c700750] Connect to grpc://localhost:27230 2026-02-19T08:21:02.970538Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c700750] Request GetUserAccountRequest { user_account_id: "user1" } 2026-02-19T08:21:03.051504Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c700750] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-02-19T08:21:03.051985Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of login1@passport 2026-02-19T08:21:03.232285Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:05.964868Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:05.965051Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:06.037695Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:06.040447Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488459416683442:2081] 1771489265941417 != 1771489265941420 2026-02-19T08:21:06.077095Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:06.077167Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:06.081644Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:06.148944Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:06.148964Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:06.148969Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:06.149041Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:06.264680Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:06.373232Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:06.379301Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:06.379401Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc66c721ed0] Connect to grpc://localhost:10227 2026-02-19T08:21:06.381125Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c721ed0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:06.387937Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc66c721ed0] Status 14 Service Unavailable 2026-02-19T08:21:06.388084Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-02-19T08:21:06.388135Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:06.388159Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:06.388409Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c721ed0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:06.389921Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc66c721ed0] Status 14 Service Unavailable 2026-02-19T08:21:06.390038Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-02-19T08:21:06.390070Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:06.954636Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-02-19T08:21:06.954690Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:06.955222Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c721ed0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:06.958208Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc66c721ed0] Status 14 Service Unavailable 2026-02-19T08:21:06.959096Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-02-19T08:21:06.959136Z node 2 :TICKET_PARSER DEBUG: 
ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:06.959576Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:07.955139Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-02-19T08:21:07.955171Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:07.955567Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c721ed0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:07.957575Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc66c721ed0] Status 14 Service Unavailable 2026-02-19T08:21:07.957750Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-02-19T08:21:07.957815Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:09.956079Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-02-19T08:21:09.956128Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:09.956391Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c721ed0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:09.960575Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c721ed0] Response AuthorizeRespons ... 
zation(something.read) 2026-02-19T08:21:33.857846Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7566d0] Request AuthorizeRequest { iam_token: "**** (E2D1584C)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:33.859700Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc66c7566d0] Status 16 Access Denied 2026-02-19T08:21:33.860017Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2026-02-19T08:21:33.860043Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2026-02-19T08:21:33.860666Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:33.860849Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7566d0] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:33.863046Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc66c7566d0] Status 16 Access Denied 2026-02-19T08:21:33.863369Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2026-02-19T08:21:33.863391Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2026-02-19T08:21:33.865099Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:33.865303Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7566d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2026-02-19T08:21:33.867429Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cc66c7566d0] Status 16 Access Denied 2026-02-19T08:21:33.868163Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2026-02-19T08:21:33.868198Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2026-02-19T08:21:33.869131Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:33.869342Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7566d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:33.871029Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c7566d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:33.871411Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-02-19T08:21:33.871515Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** 
(8E120919) () has now valid token of user1@as 2026-02-19T08:21:33.872357Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:33.872535Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7566d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2026-02-19T08:21:33.874179Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c7566d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:33.874404Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-02-19T08:21:33.874491Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:33.875076Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2026-02-19T08:21:33.875242Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7566d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2026-02-19T08:21:33.876820Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c7566d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:33.877173Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2026-02-19T08:21:33.877263Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:33.878020Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (6968D2E8) asking for AccessServiceAuthorization(something.write) 2026-02-19T08:21:33.878182Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7566d0] Request AuthorizeRequest { iam_token: "**** (6968D2E8)" permission: "something.write" resource_path { id: "123" type: "ydb.database" } resource_path { id: "folder" type: "resource-manager.folder" } } 2026-02-19T08:21:33.879786Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c7566d0] Response AuthorizeResponse { subject { service_account { id: "service1" } } } 2026-02-19T08:21:33.880042Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (6968D2E8) permission something.write now has a valid subject "service1@as" 2026-02-19T08:21:33.880121Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (6968D2E8) () has now valid token of service1@as 2026-02-19T08:21:38.268616Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488600137713998:2260];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:38.268688Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:38.413857Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:38.419036Z node 7 
:IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:38.421465Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:38.421551Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:38.424063Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608488600137713767:2081] 1771489298222276 != 1771489298222279 2026-02-19T08:21:38.460380Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:38.582209Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:38.582231Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:38.582238Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:38.582309Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:38.712305Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:38.969274Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:38.978421Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:38.983712Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:38.983819Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cc66c7c43d0] Connect to grpc://localhost:3667 2026-02-19T08:21:38.984949Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7c43d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:39.002002Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c7c43d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:39.002262Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-02-19T08:21:39.002392Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:39.003469Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:39.003562Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-02-19T08:21:39.003788Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: 
[7cc66c7c43d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:39.004474Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cc66c7c43d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:39.009925Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c7c43d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:39.010107Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cc66c7c43d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:39.010243Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-02-19T08:21:39.010313Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2026-02-19T08:21:39.010432Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:34.134045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:34.134146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:34.134236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:34.134289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:34.134337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:34.134379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:34.134410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:34.134524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:34.134599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:34.134705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:34.135583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:34.135900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:34.223211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:34.223274Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:34.235668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:34.239917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:34.240149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:34.256721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:34.259946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:34.260700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:34.261045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:34.268834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:34.269062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:34.270362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:34.270450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:34.270601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:34.270653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:34.270702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:34.270897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:18:34.425998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.427929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.428020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.428115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:34.428184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
BuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 33 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 4 } clusters: 4 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-02-19T08:21:42.403186Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72075186233409549 2026-02-19T08:21:42.405228Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table/index1" took 2.06ms result status StatusSuccess 2026-02-19T08:21:42.406963Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table/index1" PathDescription { Self { Name: "index1" PathId: 33 SchemeshardId: 72075186233409549 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976725758 CreateStep: 400 ParentPathId: 32 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 
TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplLevelTable" PathId: 34 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976725758 CreateStep: 400 ParentPathId: 33 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 35 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976725758 CreateStep: 400 ParentPathId: 33 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "index1" LocalPathId: 33 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "pool-kind-1" } Log { PreferredPoolKind: "pool-kind-1" } Data { PreferredPoolKind: "pool-kind-1" } } } } } IndexImplTableDescriptions { Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "pool-kind-1" } Log { PreferredPoolKind: "pool-kind-1" } Data { PreferredPoolKind: "pool-kind-1" } } } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 4 } clusters: 4 levels: 2 } } } } PathId: 33 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 quantity: 336, 336 unit: "request_unit", "request_unit" type: "delta", "delta" ... unblocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering >> GroupWriteTest::SimpleRdma ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2026-02-19T08:21:02.484556Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488447639178078:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:02.485273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:02.749845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:02.756454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:02.756758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:02.812203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:02.906835Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:02.914113Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488447639178047:2081] 1771489262481305 != 1771489262481308 2026-02-19T08:21:03.100317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:03.159702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:03.159722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2026-02-19T08:21:03.159728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:03.159859Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:03.357956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:03.369505Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:21:03.369559Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ce8ca8003d0] Connect to grpc://localhost:28680 2026-02-19T08:21:03.372988Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca8003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:21:03.385355Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ce8ca8003d0] Status 14 Service Unavailable 2026-02-19T08:21:03.385502Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:21:03.385544Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:03.385607Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:21:03.385953Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca8003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:21:03.390781Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ce8ca8003d0] Status 14 Service Unavailable 2026-02-19T08:21:03.390984Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:21:03.391020Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:03.509901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:04.499101Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-02-19T08:21:04.499211Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:21:04.499599Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca8003d0] Request BulkAuthorizeRequest { signature { 
access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:21:04.503047Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ce8ca8003d0] Status 14 Service Unavailable 2026-02-19T08:21:04.506058Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1211: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-02-19T08:21:04.506102Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:05.499127Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-02-19T08:21:05.499207Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:21:05.499444Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca8003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:21:05.503570Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce8ca8003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:05.503822Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-02-19T08:21:07.484674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488447639178078:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:07.484794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:16.259915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:16.260095Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:16.270172Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:16.271027Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488506771310022:2081] 1771489276170080 != 1771489276170083 2026-02-19T08:21:16.278786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:16.278861Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:16.302415Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:16.397048Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:16.397073Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:16.397078Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:16.397142Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:16.442576Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:16.634665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:16.643329Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:21:16.643386Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ce8ca881f50] Connect to grpc://localhost:1777 2026-02-19T08:21:16.644589Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca881f50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:21:16.656053Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce8ca881f50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:16.656360Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:16.657004Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.list something.update something.connect) 2026-02-19T08:21:16.657301Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca881f50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.updat ... 
RPC_CLIENT DEBUG: grpc_service_client.h:83: [7ce8ca932450] Connect to grpc://localhost:6452 2026-02-19T08:21:29.903193Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca932450] Request GetUserAccountRequest { user_account_id: "user1" } 2026-02-19T08:21:29.913528Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce8ca932450] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-02-19T08:21:29.918157Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of login1@passport 2026-02-19T08:21:30.144810Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:33.774555Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7608488579971094156:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:33.774596Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:33.845924Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:33.946623Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:33.947381Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608488579971094126:2081] 1771489293771261 != 1771489293771264 2026-02-19T08:21:33.986449Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:33.986544Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:34.003086Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:34.145807Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:34.150378Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:34.150402Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:34.150410Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:34.150492Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:34.451008Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:34.462157Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:34.462225Z 
node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-02-19T08:21:34.462275Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ce8ca88ce50] Connect to grpc://localhost:20284 2026-02-19T08:21:34.463441Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca88ce50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:34.466887Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca88ce50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:34.471107Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce8ca88ce50] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:34.472129Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ce8ca88ce50] Status 14 Service Unavailable 2026-02-19T08:21:34.472922Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-02-19T08:21:34.472984Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2026-02-19T08:21:34.473015Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:34.473056Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-02-19T08:21:34.473114Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:492: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-02-19T08:21:34.473307Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca88ce50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:34.473811Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca88ce50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-02-19T08:21:34.476214Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce8ca88ce50] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:34.476328Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ce8ca88ce50] Status 1 CANCELLED 2026-02-19T08:21:34.476399Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1466: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-02-19T08:21:34.476445Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1481: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2026-02-19T08:21:34.476467Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2026-02-19T08:21:38.680364Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488603990254043:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:38.694013Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:38.694215Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:38.799367Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:38.804676Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608488603990253811:2081] 1771489298601835 != 1771489298601838 2026-02-19T08:21:38.804742Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:38.833222Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:38.833317Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:38.846241Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:38.998030Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:38.998050Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:38.998059Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:38.998139Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:39.028657Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:39.327173Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:39.334721Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:21:39.339958Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-02-19T08:21:39.340012Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ce8ca8bd750] Connect to grpc://localhost:2577 2026-02-19T08:21:39.340973Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca8bd750] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: 
"resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-02-19T08:21:39.362219Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce8ca8bd750] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:39.362645Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:39.367955Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:520: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-02-19T08:21:39.368176Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ce8ca8bd750] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-02-19T08:21:39.373919Z node 7 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ce8ca8bd750] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-02-19T08:21:39.374359Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> KqpSplit::ChoosePartition+Ascending [GOOD] >> KqpSplit::ChoosePartition+Descending >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> IndexBuildTest::CancelBuildUniq [GOOD] >> GroupWriteTest::WriteHardRateDispatcher >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |92.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> GroupWriteTest::SimpleRdma [FAIL] >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::LoginEmptyTicketBad >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2026-02-19T08:21:02.899910Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488447695637839:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:02.908371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:02.944737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:03.281922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:03.282079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:03.321492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:03.330009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:03.354169Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488447695637797:2081] 1771489262895457 != 1771489262895460 2026-02-19T08:21:03.358766Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.420613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:03.420646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:03.420655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:03.424429Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:03.604433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:03.624653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:03.629793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:03.765943Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 2026-02-19T08:21:03.774386Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:21:03.774430Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:21:03.775182Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****zNTA (F74FCA42) () has now retryable error message 'Security state is empty' 2026-02-19T08:21:03.775456Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:21:03.775474Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:21:03.775755Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****zNTA (F74FCA42) () has now retryable error 
message 'Security state is empty' 2026-02-19T08:21:03.775768Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:827: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-02-19T08:21:03.775781Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:827: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-02-19T08:21:03.775804Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket eyJh****zNTA (F74FCA42): Security state is empty 2026-02-19T08:21:03.909990Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:05.902997Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****zNTA (F74FCA42) 2026-02-19T08:21:05.903447Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:21:05.903480Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:21:05.903802Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket eyJh****zNTA (F74FCA42) () has now retryable error message 'Security state is empty' 2026-02-19T08:21:05.903816Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:827: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-02-19T08:21:06.782022Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:07.898263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488447695637839:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:07.898367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:08.904791Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****zNTA (F74FCA42) 2026-02-19T08:21:08.905055Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:21:08.905075Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:21:08.905892Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****zNTA (F74FCA42) () has now valid token of user1 2026-02-19T08:21:08.905911Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:854: CanInitLoginToken, database /Root, A4 success 2026-02-19T08:21:14.474882Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488497096190060:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:14.484115Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:14.486051Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:14.552606Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:14.554989Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [2:7608488497096190012:2081] 1771489274459934 != 1771489274459937 2026-02-19T08:21:14.578260Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:14.578345Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:14.591347Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:14.682231Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:14.682258Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:14.682266Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:14.682337Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:14.687235Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:14.885826Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:14.891193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:14.893112Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:21:14.893208Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf1d941b250] Connect to grpc://localhost:22327 2026-02-19T08:21:14.896156Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf1d941b250] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-02-19T08:21:14.910022Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf1d941b250] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-02-19T08:21:14.910506Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:18.667914Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608488514425604072:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:18.668860Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:18.828449Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:18.830582Z node 3 
:IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:18.832620Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:18.832689Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:18.833946Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608488514425603962:2081] 1771489278655418 != 1771489278655421 2026-02-19T08:21:18.874673Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:18.974440Z node 3 :NET_CLASSIFIER WARN: net_class ... eScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:23.295440Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:23.302723Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:23.307424Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:21:23.307491Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf1d949d150] Connect to grpc://localhost:24363 2026-02-19T08:21:23.308371Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf1d949d150] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-02-19T08:21:23.319674Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf1d949d150] Status 14 Service Unavailable 2026-02-19T08:21:23.320074Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:23.320104Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:21:23.320245Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf1d949d150] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-02-19T08:21:23.323119Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf1d949d150] Status 14 Service Unavailable 2026-02-19T08:21:23.323412Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:23.793829Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:24.784986Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket **** (8E120919) 2026-02-19T08:21:24.785038Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:21:24.785244Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf1d949d150] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 
NebiusAccessService::Authenticate request iam_token: "user1" 2026-02-19T08:21:24.789479Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf1d949d150] Status 14 Service Unavailable 2026-02-19T08:21:24.789825Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:25.786511Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket **** (8E120919) 2026-02-19T08:21:25.786570Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:21:25.786753Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf1d949d150] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-02-19T08:21:25.791411Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf1d949d150] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-02-19T08:21:25.791699Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:36.263157Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608488595318723371:2149];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:36.263235Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:36.417823Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:36.554349Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:36.556004Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608488595318723252:2081] 1771489296225056 != 1771489296225059 2026-02-19T08:21:36.564387Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:36.566546Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:36.582146Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:36.598405Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:36.850406Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:36.850432Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:36.850461Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:36.850546Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:37.141829Z node 5 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:37.211439Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:37.218564Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:37.227540Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:21:37.229885Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf1d94b01d0] Connect to grpc://localhost:14170 2026-02-19T08:21:37.230891Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf1d94b01d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-02-19T08:21:37.265903Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf1d94b01d0] Status 14 Service Unavailable 2026-02-19T08:21:37.266182Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1899: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-02-19T08:21:37.266207Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:569: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-02-19T08:21:37.266386Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf1d94b01d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-02-19T08:21:37.285366Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf1d94b01d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-02-19T08:21:37.285901Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket **** (8E120919) () has now valid token of user1@as 2026-02-19T08:21:37.357887Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:41.539114Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7608488614273335722:2251];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:41.612230Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:41.637799Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:41.744315Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608488614273335483:2081] 1771489301504520 != 1771489301504523 2026-02-19T08:21:41.782014Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: 
Table profiles were not loaded 2026-02-19T08:21:41.783394Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:41.783492Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:41.798775Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:41.803430Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:41.962363Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:41.962390Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:41.962400Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:41.962481Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:42.294720Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:42.301005Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:42.302981Z node 6 :TICKET_PARSER ERROR: ticket_parser_impl.h:980: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported 2026-02-19T08:21:42.542023Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink+UseStreamIndex [GOOD] >> KqpLimits::AffectedShardsLimit >> KqpNotNullColumns::UpdateNotNullPk >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpResultSetFormats::ArrowFormat_Types_String |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |92.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] >> KqpResultSetFormats::ArrowFormat_EmptyBatch |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |92.7%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |92.7%| 
[LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> KqpLimits::QueryExecTimeout [GOOD] >> KqpResultSetFormats::ArrowFormat_Simple >> KqpResultSetFormats::DefaultFormat >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::SimpleRdma [FAIL] Test command err: RandomSeed# 38739764278513713 2026-02-19T08:21:46.915495Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-02-19T08:21:46.944104Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-02-19T08:21:46.944205Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-02-19T08:21:46.947057Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 received TEvBlockResult {Status# OK} assertion failed at ydb/core/load_test/ut/group_test_ut.cpp:40, auto (anonymous namespace)::TTetsEnv::RunSingleLoadTest(const TString &, bool)::(anonymous class)::operator()(ui32, std::unique_ptr &) const: (!memReg.Empty()) unable to extract mem region from chunk TBackTrace::Capture()+28 (0x107CE99C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x10CBC9AC) ??+0 (0x1042BC8D) void NKikimr::TTestActorSystem::Sim, std::__y1::allocator> const&)::'lambda'()>(NKikimr::TTestActorSystem::WaitForEdgeActorEvent(std::__y1::set, std::__y1::allocator> const&)::'lambda'()&&, std::__y1::function)+3731 (0x10415FE3) NKikimr::TTestActorSystem::WaitForEdgeActorEvent(std::__y1::set, std::__y1::allocator> const&)+1145 (0x10414549) TAutoPtr, TDelete> TEnvironmentSetup::WaitForEdgeActorEvent(NActors::TActorId const&, bool, TInstant)+1728 (0x1042A920) ??+0 (0x103A5AA9) NTestSuiteGroupWriteTest::TTestCaseSimpleRdma::Execute_(NUnitTest::TTestContext&)+1008 (0x103A8490) std::__y1::__function::__func, void ()>::operator()()+280 (0x103B8C78) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x10CEACBA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x10CC3A48) NTestSuiteGroupWriteTest::TCurrentTest::Execute()+1286 (0x103B7D56) NUnitTest::TTestFactory::Execute()+2176 (0x10CC5200) NUnitTest::RunMain(int, char**)+5805 (0x10CE4DCD) ??+0 (0x7FE8046D0D90) __libc_start_main+128 (0x7FE8046D0E40) _start+41 (0xDD9A029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] Test command err: RandomSeed# 17877436821404144594 readBody# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge discover maxId#[0:0:0:0:0:0:0] readBody# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100501:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100502:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading 
from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100503:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100508:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100509:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge discover maxId#[100510:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge discover maxId#[100511:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge discover maxId#[100512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge discover maxId#[100513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge discover maxId#[100514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge discover maxId#[100515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge discover maxId#[100516:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge discover maxId#[100517:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge discover maxId#[100518:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge discover maxId#[100519:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge discover maxId#[100520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge discover maxId#[100521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge discover maxId#[100522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge discover maxId#[100523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge discover maxId#[100524:2:1:0:0:5:0] *** reading from i# 0 
*** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge discover maxId#[100525:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge discover maxId#[100526:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge discover maxId#[100527:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge discover maxId#[100528:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge discover maxId#[100529:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge discover maxId#[100530:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge discover maxId#[100531:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge discover maxId#[100532:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge discover maxId#[100533:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge discover maxId#[100534:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge discover maxId#[100535:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge discover maxId#[100536:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge discover maxId#[100537:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge discover maxId#[100538:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge discover maxId#[100539:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge discover maxId#[100540:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge discover maxId#[100541:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge discover maxId#[100542:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge discover maxId#[100543:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge discover maxId#[100544:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge discover maxId#[100545:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge discover 
maxId#[100546:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge discover maxId#[100547:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge discover maxId#[100548:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge discover maxId#[100549:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge discover maxId#[100550:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge discover maxId#[100551:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge discover maxId#[100552:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge discover maxId#[100553:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge discover maxId#[100554:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge discover maxId#[100555:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge discover maxId#[100556:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge discover maxId#[100557:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge discover maxId#[100558:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge discover maxId#[100559:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge discover maxId#[100560:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge discover maxId#[100561:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge discover maxId#[100562:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge discover maxId#[100563:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge discover maxId#[100564:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100565:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100566:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100567:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 4 
*** performing bridge discover maxId#[100568:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100569:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100570:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100571:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100572:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100573:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# ... iscover maxId#[101450:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101451:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101452:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101453:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101454:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101455:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101456:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101457:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101458:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101459:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge discover maxId#[101460:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge discover maxId#[101461:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge discover maxId#[101462:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge discover maxId#[101463:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge discover maxId#[101464:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge discover maxId#[101465:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge discover 
maxId#[101466:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge discover maxId#[101467:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge discover maxId#[101468:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge discover maxId#[101469:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge discover maxId#[101470:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge discover maxId#[101471:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 4 *** performing bridge discover maxId#[101472:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge discover maxId#[101473:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge discover maxId#[101474:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge discover maxId#[101475:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge discover maxId#[101476:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge discover maxId#[101477:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge discover maxId#[101478:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge discover maxId#[101479:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge discover maxId#[101480:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge discover maxId#[101481:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge discover maxId#[101482:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 7 *** performing bridge discover maxId#[101483:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge discover maxId#[101484:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge discover maxId#[101485:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge discover maxId#[101486:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge discover maxId#[101487:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 4 
*** performing bridge discover maxId#[101488:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge discover maxId#[101489:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 6 *** performing bridge discover maxId#[101490:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge discover maxId#[101491:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge discover maxId#[101492:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge discover maxId#[101493:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge discover maxId#[101494:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge discover maxId#[101495:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge discover maxId#[101496:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge discover maxId#[101497:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge discover maxId#[101498:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge discover maxId#[101499:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge discover maxId#[101500:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge discover maxId#[101501:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge discover maxId#[101502:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge discover maxId#[101503:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge discover maxId#[101504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge discover maxId#[101505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge discover maxId#[101506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge discover maxId#[101507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge discover maxId#[101508:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge discover maxId#[101509:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 
readBody# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge discover maxId#[101510:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge discover maxId#[101511:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge discover maxId#[101512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge discover maxId#[101513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge discover maxId#[101514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101516:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101517:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101518:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101519:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 >> KqpScanArrowFormat::AllTypesColumns >> KqpQueryDiscard::DiscardSelectEnsureExecuted [GOOD] >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard >> TTicketParserTest::LoginEmptyTicketBad [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::WriteCommitVersion >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile >> KqpScan::JoinWithParams [GOOD] >> KqpScan::LMapFunction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: 2026-02-19T08:19:47.477863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488125861606526:2208];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:47.478318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:47.521685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got 
undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:47.849594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:47.849672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:47.877335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:47.897069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:47.987440Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:48.073193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:48.077266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:48.077293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:48.077309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:48.077403Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:48.485894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:48.665847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:48.752650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:48.917733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:49.093432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:49.173645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.285170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488143041477387:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:51.285269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:51.286038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488143041477397:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:51.286105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:51.735252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.781180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.843157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.888132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:51.945991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:52.006007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:52.076533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:52.169584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:52.295721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488147336445562:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.295800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.296387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488147336445567:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.296432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488147336445568:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.296570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:52.300139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:52.318981Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488147336445571:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:19:52.403255Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488147336445622:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:52.480376Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488125861606526:2208];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:52.480437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:54.476702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ... 9Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:13.199793Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:13.199901Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:13.200340Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:13.202625Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608488493106619997:2081] 1771489273060996 != 1771489273060999 2026-02-19T08:21:13.249110Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:13.256513Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:13.303955Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:13.303984Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:13.303994Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:13.304093Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:13.868442Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:13.879971Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:13.886857Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:13.993443Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:14.131124Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:14.197365Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:14.279392Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.663273Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488510286491066:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.663390Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.664092Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488510286491076:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.664166Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:17.754296Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.848318Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.935048Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.985501Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:18.030724Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:18.063472Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608488493106620027:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:18.063547Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:18.078455Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:18.128508Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:18.188709Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:18.288451Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488514581459249:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:18.288731Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:18.288738Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488514581459254:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:18.289192Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488514581459256:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:18.289299Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:18.294648Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:18.310241Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488514581459257:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:18.399500Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488514581459309:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:28.185427Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:21:28.185451Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:36.127363Z node 5 :KQP_PROXY WARN: kqp_proxy_service.cpp:1336: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=MTkxODVkZWQtN2MzYWM3NWQtNDU1YjdlZDItZjk2NDgxMjk= }
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=MTkxODVkZWQtN2MzYWM3NWQtNDU1YjdlZDItZjk2NDgxMjk= >> KqpExplain::ReadTableRangesFullScan [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> KqpSplit::AfterResult+Descending [GOOD] >> KqpScan::LimitOverSecondaryIndexRead [GOOD] >> KqpScan::Like >> KqpSplit::ChoosePartition+Descending [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2026-02-19T08:21:02.709062Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488446065188492:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:02.709122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:02.981806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:03.105268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:03.105360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:03.188085Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:03.189504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:03.193465Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488446065188461:2081] 1771489262707478 != 1771489262707481 2026-02-19T08:21:03.263995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:03.347581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:03.347604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:03.347611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:03.347690Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:03.567907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:03.574060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:03.621023Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:03.632157Z node 1 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:21:03.632187Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:21:03.633133Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****6oBg (02F427A1) () has now valid token of user1 2026-02-19T08:21:03.633148Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:854: CanInitLoginToken, database /Root, A4 success 2026-02-19T08:21:03.721217Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:06.636776Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488465981462961:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:06.636855Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:06.648910Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:06.716748Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:06.718344Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488465981462928:2081] 1771489266633624 != 1771489266633627 2026-02-19T08:21:06.749343Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:06.749487Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:06.763041Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:06.821597Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:06.821623Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:06.821630Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:06.821732Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:06.889327Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:07.057989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:07.070213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:21:07.243753Z node 2 
:TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:07.252368Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:21:07.252401Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:21:07.253452Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****460w (6DE46514) () has now valid token of user1 2026-02-19T08:21:07.253481Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:854: CanInitLoginToken, database /Root, A4 success 2026-02-19T08:21:10.340678Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608488482653535707:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:10.340730Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:10.355326Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:10.417322Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:10.419412Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608488482653535672:2081] 1771489270339883 != 1771489270339886 2026-02-19T08:21:10.456202Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:10.456282Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:10.461180Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:10.502999Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:10.503020Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:10.503028Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:10.503087Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:10.639446Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:10.695350Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:10.832986Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-02-19T08:21:10.833022Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: 
CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-02-19T08:21:10.833079Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:875: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****2OSw (D54DA140)) 2026-02-19T08:21:11.346845Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:11.832891Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root/Db1 keys 1 2026-02-19T08:21:11.832916Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1558: Handle deferred tokens for database: /Root/Db1 2026-02-19T08:21:11.833182Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-02-19T08:21:11.833205Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-02-19T08:21:11.834154Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****2OSw (D54DA140) () has now valid token of user1 2026-02-19T08:21:11.834179Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:854: CanInitLoginToken, database /Root/Db1, A4 success 2026-02-19T08:21:11.899121Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db /Root/Db2, token db /Root/Db2, DomainLoginOnly 0 2026-02-19T08:21:11.899160Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(2): /Root/Db2, /Root 2026-02-19T08:21:11.899203Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:875: CanInitLoginToken, database /Root/Db2, login state is not available yet, deffer token (eyJh****KPfw (6157544B)) 2026-02-19T08:21:12.899181Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Upd ... 
2Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:13.870407Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608488493677123607:2081] 1771489273767787 != 1771489273767790 2026-02-19T08:21:13.912584Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:13.912685Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:13.920212Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:13.977242Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:13.977269Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:13.977277Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:13.977350Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:14.010495Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:14.225705Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:14.411161Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:14.419063Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:21:14.419098Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:21:14.419761Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****S4xA (E7D9F919) () has now valid token of user1 2026-02-19T08:21:14.419811Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:854: CanInitLoginToken, database /Root, A4 success 2026-02-19T08:21:14.420509Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:14.777908Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:18.773849Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608488493677123653:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:18.773950Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:18.789949Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****S4xA (E7D9F919) 2026-02-19T08:21:18.794456Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****S4xA (E7D9F919) () has now 
valid token of user1 2026-02-19T08:21:23.806005Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****S4xA (E7D9F919) 2026-02-19T08:21:23.806448Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****S4xA (E7D9F919) () has now valid token of user1 2026-02-19T08:21:24.427826Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:27.814385Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****S4xA (E7D9F919) 2026-02-19T08:21:27.814722Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****S4xA (E7D9F919) () has now valid token of user1 2026-02-19T08:21:28.857023Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:21:28.857053Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:32.830117Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****S4xA (E7D9F919) 2026-02-19T08:21:32.830557Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****S4xA (E7D9F919) () has now valid token of user1 2026-02-19T08:21:35.306959Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608488588814107719:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:35.307487Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:35.380523Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:35.479501Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:35.479582Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:35.498850Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:35.516239Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:35.585204Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:35.713094Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:35.713121Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:35.713130Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:35.713218Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:35.991092Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:36.000422Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:36.081887Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:36.088149Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:806: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-02-19T08:21:36.088184Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:811: CanInitLoginToken, target database candidates(1): /Root 2026-02-19T08:21:36.088909Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1881: Ticket eyJh****-Azw (2AC07D2A) () has now valid token of user1 2026-02-19T08:21:36.088926Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:854: CanInitLoginToken, database /Root, A4 success 2026-02-19T08:21:36.089424Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:36.294873Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:40.309991Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608488588814107719:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:40.310077Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:41.306325Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****-Azw (2AC07D2A) 2026-02-19T08:21:41.306637Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1912: Ticket eyJh****-Azw (2AC07D2A) () has now permanent error message 'User not found' 2026-02-19T08:21:45.312151Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1596: Refreshing ticket eyJh****-Azw (2AC07D2A) 2026-02-19T08:21:47.790054Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:47.790239Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:47.897657Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:47.902074Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608488641230186619:2081] 1771489307652849 != 1771489307652852 2026-02-19T08:21:47.920121Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:47.920226Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:47.948830Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:47.955649Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:21:48.092081Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:48.092101Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:48.092108Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:48.092184Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:48.486584Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:48.601435Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1554: Updated state for /Root keys 1 2026-02-19T08:21:48.622763Z node 6 :TICKET_PARSER ERROR: ticket_parser_impl.h:988: Ticket **** (00000000): Ticket is empty 2026-02-19T08:21:48.720828Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |92.7%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.7%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResult+Descending [GOOD] Test command err: 2026-02-19T08:21:36.312193Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488593618282931:2211];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:36.312278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:36.682766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:36.682926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:36.735594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:36.773850Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488593618282749:2081] 1771489296302008 != 1771489296302011 2026-02-19T08:21:36.794251Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:36.799415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:36.929860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:37.022590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:37.022613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:37.022636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:37.022715Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:37.322498Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:37.516947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:37.534551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:37.597069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:21:37.768901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:37.967173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:38.113845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.149256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488610798153806:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.149370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.149722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488610798153816:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.149751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.463150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.502568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.543179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.584650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.641408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.683509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.719031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.805072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.909384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488610798154693:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.909476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.909802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488610798154698:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.909843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488610798154699:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.909947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.913530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:40.924942Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488610798154702:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:40.979773Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488610798154753:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:41.311425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488593618282931:2211];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:41.311486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 S ... /migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:44.687543Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:44.688126Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:44.689353Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488626444983213:2081] 1771489304512577 != 1771489304512580 2026-02-19T08:21:44.707034Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:44.707117Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:44.729392Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:44.912564Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:44.912589Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:44.912598Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:44.912690Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:44.965398Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:45.541595Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:45.695347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:45.701063Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:21:45.708416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:45.892114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:46.060390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:46.139124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.361858Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488647919821565:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.361986Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.369260Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488647919821575:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.369376Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.484012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.535198Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.537927Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608488626444983397:2219];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:49.538082Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:49.581771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.636480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.684346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.741259Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.820904Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.917545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.186237Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488652214789740:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.186318Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.186780Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488652214789745:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.186819Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488652214789746:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.186935Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.191965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:50.222486Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608488652214789749:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:21:50.321397Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608488652214789800:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2026-02-19T08:21:53.183528Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489312705, txId: 281474976715673] shutting down |92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNotNullColumns::UpdateNotNullPk [GOOD] >> KqpNotNullColumns::UpdateNotNullPkPg >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::ChoosePartition+Descending [GOOD] Test command err: 2026-02-19T08:21:38.023313Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488603858971717:2235];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:38.023872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:38.185221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:38.548549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:38.548695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:38.559937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:38.587081Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:38.649898Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488599564004214:2081] 1771489298005737 != 1771489298005740 2026-02-19T08:21:38.651606Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:38.734298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:38.734321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:38.734327Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:38.734394Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:38.777296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:39.028126Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:39.264228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:39.342231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:39.517466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:39.724840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:39.806498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:41.755544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488616743875281:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:41.755661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:41.756089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488616743875291:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:41.756131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:42.139897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:42.170281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:42.207050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:42.240573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:42.290654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:42.327573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:42.368237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:42.427568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:42.571997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488621038843464:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:42.572102Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:42.572383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488621038843469:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:42.572417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488621038843470:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:42.572534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:42.576688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:42.603769Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488621038843473:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:42.682736Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488621038843524:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:43.024035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488603858971717:2235];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:43.024099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 ... --------------------------------- 2026-02-19T08:21:45.434978Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489305005, txId: 281474976710673] shutting down 2026-02-19T08:21:46.545858Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:46.546033Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:46.560386Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:46.563679Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488636573741307:2081] 1771489306331179 != 1771489306331182 2026-02-19T08:21:46.588405Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:46.588491Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:46.601036Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:46.714906Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:46.715404Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:46.715413Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:46.715420Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:46.715489Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:47.301849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:47.307714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:47.317209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:47.449229Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:47.517069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:47.728186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:47.847198Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.449825Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488653753612369:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.449931Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.450234Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488653753612379:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.450276Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:50.522838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.555867Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.605312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.642850Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.681159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.762083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.821481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.893524Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:51.039841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488658048580550:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:51.039956Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:51.040271Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488658048580556:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:51.040298Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488658048580555:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:51.040357Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:51.044089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:51.056265Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608488658048580559:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:51.133789Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608488658048580610:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2026-02-19T08:21:53.616559Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489313244, txId: 281474976710673] shutting down >> KqpResultSetFormats::ArrowFormat_Types_String [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Time >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] 2026-02-19T08:21:00.148187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:21:00.148265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:00.148308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:21:00.148345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:21:00.148381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:21:00.148417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:21:00.148447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:21:00.148475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:21:00.148540Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:21:00.148615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:21:00.149429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:00.149759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:21:00.272334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:21:00.272413Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:00.273254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:21:00.282429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:21:00.282532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:21:00.282658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:21:00.293154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:21:00.293356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:21:00.293919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:21:00.294259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:21:00.297435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:00.297567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:21:00.298575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:21:00.298621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:21:00.298709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:21:00.298747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:21:00.298791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:21:00.298963Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:00.433384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.434748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: ... 
RD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-02-19T08:21:53.845303Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2026-02-19T08:21:53.845447Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-02-19T08:21:53.845489Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-02-19T08:21:53.845538Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-02-19T08:21:53.845574Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-02-19T08:21:53.845616Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2026-02-19T08:21:53.845703Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:1378:3205] message: TxId: 281474976710757 2026-02-19T08:21:53.845781Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-02-19T08:21:53.845832Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2026-02-19T08:21:53.845868Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710757:0 2026-02-19T08:21:53.846011Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:21:53.846060Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2026-02-19T08:21:53.846087Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710757:1 2026-02-19T08:21:53.846121Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:21:53.846149Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:2 2026-02-19T08:21:53.846171Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710757:2 2026-02-19T08:21:53.846232Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-02-19T08:21:53.847077Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-02-19T08:21:53.847195Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:21:53.847244Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:21:53.847329Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:21:53.847377Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:21:53.847420Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:21:53.847832Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-02-19T08:21:53.849625Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:21:53.849862Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-02-19T08:21:53.849924Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-02-19T08:21:53.852692Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:21:53.852841Z node 16 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2026-02-19T08:21:54.617876Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:54.618201Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 357us result status StatusPathDoesNotExist 2026-02-19T08:21:54.618425Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:21:54.619135Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:54.620064Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 245us result status StatusPathDoesNotExist 2026-02-19T08:21:54.620244Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:21:54.620901Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:54.621169Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 275us result status StatusSuccess 2026-02-19T08:21:54.621684Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools 
{ } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> GroupWriteTest::TwoTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuildUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:18:29.666182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:18:29.666268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.666342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:18:29.666402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:18:29.666451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:18:29.666499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:18:29.666530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:18:29.666600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase 
configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:18:29.666689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:18:29.666788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:18:29.667729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:18:29.668093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:18:29.771277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:18:29.771341Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:18:29.795898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:18:29.810122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:18:29.810320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:18:29.821663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:18:29.822358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:18:29.823142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:18:29.823415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:18:29.826476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:29.826638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:18:29.827894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:18:29.827973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:18:29.828152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:18:29.828206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:18:29.828249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:18:29.828460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-02-19T08:18:29.973782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.974845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.974963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:18:29.975839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
fy, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-02-19T08:21:46.191793Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:127:2152] message: TxId: 281474976710760 2026-02-19T08:21:46.191846Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:21:46.191884Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-02-19T08:21:46.191919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710760:0 2026-02-19T08:21:46.192013Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-02-19T08:21:46.194597Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-02-19T08:21:46.194673Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710760 2026-02-19T08:21:46.194761Z node 7 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-02-19T08:21:46.194905Z node 7 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [7:1582:3444], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-02-19T08:21:46.197310Z node 7 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2026-02-19T08:21:46.197496Z node 7 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [7:1582:3444], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:21:46.197590Z node 7 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-02-19T08:21:46.199875Z node 7 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2026-02-19T08:21:46.200015Z node 7 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [7:1582:3444], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:21:46.200053Z node 7 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-02-19T08:21:46.200166Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:21:46.200219Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:1681:3532] TestWaitNotification: OK eventTxId 102 2026-02-19T08:21:46.204500Z node 7 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-02-19T08:21:46.204763Z node 7 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { 
source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-02-19T08:21:46.208728Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:21:46.208978Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 271us result status StatusSuccess 2026-02-19T08:21:46.209515Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:21:46.213520Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:21:46.213775Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 238us result status StatusPathDoesNotExist 2026-02-19T08:21:46.213968Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ColumnShardTiers::TieringUsage |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |92.8%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.8%| [LD] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 809653930422246089 2026-02-19T08:21:43.789644Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-02-19T08:21:43.789803Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-02-19T08:21:43.843802Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-02-19T08:21:43.843899Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-02-19T08:21:43.844039Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-02-19T08:21:43.844074Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-02-19T08:21:43.849315Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 received TEvBlockResult {Status# OK} 2026-02-19T08:21:43.849445Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 received TEvBlockResult {Status# OK} 2026-02-19T08:21:43.894217Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:21:43.894364Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:21:43.899435Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-02-19T08:21:43.899544Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-02-19T08:21:57.887795Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2026-02-19T08:21:57.887898Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 
72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:21:57.887968Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:21:57.925949Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2026-02-19T08:21:57.926054Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] >> BSCReadOnlyPDisk::ReadOnlyOneByOne >> KqpScan::LMapFunction [GOOD] >> KqpResultSetFormats::ArrowFormat_EmptyBatch [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |92.8%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] Test command err: RandomSeed# 11830800051909309899 2026-02-19T08:21:59.831976Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.832093Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.832134Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.832169Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.832222Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: 
Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.832259Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.832322Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.832361Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.833730Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.833830Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.833898Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.833949Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.834016Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.834068Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.834122Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.834189Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.834273Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.834349Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.834390Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.834426Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' 
StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.834476Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.834521Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.834564Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.834597Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:21:59.836566Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.836641Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.836697Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.836743Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.836787Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.836833Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.836898Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:21:59.836944Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:22:00.031488Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:1:0]} 2026-02-19T08:22:00.031568Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:2:0]} 2026-02-19T08:22:00.031616Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: 
Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:3:0]} 2026-02-19T08:22:00.031660Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:4:0]} 2026-02-19T08:22:00.031703Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:5:0]} 2026-02-19T08:22:00.031763Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:6:0]} 2026-02-19T08:22:00.031806Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:7:0]} >> KqpLimits::AffectedShardsLimit [GOOD] >> KqpResultSetFormats::DefaultFormat [GOOD] >> KqpResultSetFormats::ValueFormat_Simple |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |92.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks+Volatile >> KqpResultSetFormats::ArrowFormat_Simple [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: 2026-02-19T08:13:40.634976Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:40.659258Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:40.660283Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:40.660361Z node 2 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:1:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:40.660437Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 
SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:40.660751Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:40.661667Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:51:2076] ControllerId# 72057594037932033 2026-02-19T08:13:40.661703Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:40.661815Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:40.661925Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:40.672553Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:40.672617Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:40.674992Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:60:2080] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.675170Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:61:2081] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.675296Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:62:2082] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.675452Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:63:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.675563Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:64:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.675672Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:65:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.675796Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:66:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.675839Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:40.675941Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:51:2076] 2026-02-19T08:13:40.675971Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:51:2076] 2026-02-19T08:13:40.676033Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:40.676091Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:40.676383Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:356} Bootstrap 2026-02-19T08:13:40.679147Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-02-19T08:13:40.679609Z node 1 :BS_NODE WARN: 
{NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-02-19T08:13:40.679679Z node 1 :BS_NODE DEBUG: {NW102@node_warden_pdisk.cpp:146} Inferring PDisk slot count not configured Path# SectorMap:0:3200 SlotCount# 0 SlotSizeInUnits# 0 2026-02-19T08:13:40.679732Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:256} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} ExpectedSlotCount# 0 SlotSizeInUnits# 0 Temporary# false 2026-02-19T08:13:40.679982Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:70} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-02-19T08:13:40.681145Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:301} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DDisk# false VDiskServiceId# [1:5980451682:0] 2026-02-19T08:13:40.681211Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-02-19T08:13:40.682199Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:75:2077] ControllerId# 72057594037932033 2026-02-19T08:13:40.682235Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-02-19T08:13:40.682304Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:327} StartInvalidGroupProxy GroupId# 4294967295 2026-02-19T08:13:40.682407Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:339} StartRequestReportingThrottler 2026-02-19T08:13:40.695099Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-02-19T08:13:40.695170Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-02-19T08:13:40.697071Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:83:2082] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.697227Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:84:2083] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.697355Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:85:2084] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.697471Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:86:2085] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.697599Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:87:2086] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.697734Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:88:2087] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.697906Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:89:2088] targetNodeId# 1 Marker# DSP01 2026-02-19T08:13:40.697940Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-02-19T08:13:40.698015Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:75:2077] 2026-02-19T08:13:40.698045Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:75:2077] 2026-02-19T08:13:40.698082Z node 1 :BS_PROXY NOTICE: 
dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-02-19T08:13:40.698120Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-02-19T08:13:40.699330Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:51:2076] 2026-02-19T08:13:40.699410Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:40.699629Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:40.699825Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:40.700051Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2026-02-19T08:13:40.700085Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:40.700656Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-02-19T08:13:40.700753Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:40.700917Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:40.700946Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-02-19T08:13:40.742069Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:40.744167Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:13:40.744359Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:99:2092] 2026-02-19T08:13:40.744403Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:99:2092] 2026-02-19T08:13:40.744525Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-02-19T08:13:40.744838Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:373: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-02-19T08:13:40.745052Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:13:40.745181Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2026-02-19T08:13:40.745216Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false 
PendingEvents.size# 1 2026-02-19T08:13:40.745245Z node 1 :BS_NODE DEBUG: {NWDC32@distconf ... for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074428Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65556.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074458Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65555.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074487Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65554.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074517Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65553.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074545Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65552.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074573Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65551.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074602Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65550.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074628Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65549.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074656Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65548.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074685Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65547.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074716Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65546.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074743Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65545.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074770Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65544.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074800Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65543.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074825Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65542.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074848Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65541.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074871Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65540.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.074898Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65539.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.076284Z node 1 :HIVE WARN: hive_impl.cpp:573: 
HIVE#72057594037927937 The tablet Unknown.65538.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.076364Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65537.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.076395Z node 1 :HIVE WARN: hive_impl.cpp:573: HIVE#72057594037927937 The tablet Unknown.65536.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-02-19T08:19:11.076930Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} queued, type NKikimr::NHive::TTxProcessPendingOperations 2026-02-19T08:19:11.077011Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:19:11.077123Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} hope 1 -> done Change{10, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:19:11.077200Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:19:11.077349Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:515:2352] 2026-02-19T08:19:11.077386Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:515:2352] 2026-02-19T08:19:11.077505Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:932: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [1:146:2124] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0:(undefined)} 2026-02-19T08:19:11.077556Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:713: SelectForward node: 1 selfDC: 1 leaderDC: 1 resolveFlags: 1:2:0:(undefined) local: 1 localDc: 1 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 1 allowFollowers: 0 winner: [1:146:2124] 2026-02-19T08:19:11.077684Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594037927937 Active! 
Generation: 3, Type: Hive started in 21msec Marker# TSYS24 2026-02-19T08:19:11.077752Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [72057594037927937] Activate 2026-02-19T08:19:11.077926Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:515:2352] 2026-02-19T08:19:11.078147Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [1:99:2092] 2026-02-19T08:19:11.078196Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [1:99:2092] 2026-02-19T08:19:11.078237Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037927937] push event to server [1:99:2092] 2026-02-19T08:19:11.078305Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [1:97:2092] EventType# 268959744 2026-02-19T08:19:11.078357Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [1:515:2352] 2026-02-19T08:19:11.078402Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594037932033]::SendEvent [1:515:2352] 2026-02-19T08:19:11.078550Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [1:515:2352] 2026-02-19T08:19:11.078828Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [1:515:2352] 2026-02-19T08:19:11.078867Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [1:515:2352] 2026-02-19T08:19:11.078892Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037932033] push event to server [1:515:2352] 2026-02-19T08:19:11.078937Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [1:432:2281] EventType# 268637702 2026-02-19T08:19:11.079051Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [2:102:2089] 2026-02-19T08:19:11.079083Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [2:102:2089] 2026-02-19T08:19:11.079108Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594037927937] push event to server [2:102:2089] 2026-02-19T08:19:11.079202Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72057594037927937]::SendEvent [2:102:2089] 2026-02-19T08:19:11.079386Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2026-02-19T08:19:11.079448Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:19:11.079662Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{22, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:19:11.079727Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:19:11.079916Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [2:101:2089] EventType# 268959744 
2026-02-19T08:19:11.080035Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-02-19T08:19:11.080088Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:19:11.080249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:11.080365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:11.080451Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{10, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-02-19T08:19:11.080513Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:19:11.080830Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-02-19T08:19:11.080899Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:19:11.081010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:11.081066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:11.081121Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{11, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-02-19T08:19:11.081160Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:19:11.081319Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-02-19T08:19:11.081370Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:19:11.081443Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{12, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:19:11.081492Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} Took 44.69263 seconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:19:51.844158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:51.999115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:52.022539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:52.022613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:52.023105Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:52.361599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:52.369866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:52.493704Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489188286648 != 1771489188286652 2026-02-19T08:19:52.516160Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:52.582849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:52.744659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:53.274427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:53.274488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:53.274523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:53.275004Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:53.287388Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:53.821305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:53.836758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:19:53.933468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:54.308727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:54.776326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:55.145312Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.121431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1910:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.121610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.122689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.122955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:56.154350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.361434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.659048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:57.015384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:57.324888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:57.702329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.086506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:58.609799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:59.131767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.131873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.132184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.132257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.132334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2802:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:59.137588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:59.320514Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2804:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:19:59.391357Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2864:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:20:01.332523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, a ... tion: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem, At lambda
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem, At lambda
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem, At lambda
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2026-02-19T08:21:48.045369Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:48.121816Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:48.185818Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:48.330501Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:48.333586Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608488641219673795:2081] 1771489307950518 != 1771489307950521 2026-02-19T08:21:48.366335Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:48.366440Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:48.389554Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:48.645950Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:48.706391Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:48.706416Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:48.706427Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:48.706514Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:49.021943Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:49.475546Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:49.497388Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.600497Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.071216Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.187471Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:53.795010Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488666989479466:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:53.795095Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:53.795395Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488666989479476:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:53.795437Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:53.882010Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:53.937272Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.004951Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.099683Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.149350Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.209259Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.264185Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.332005Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.444934Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488671284447651:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.445022Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.446069Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488671284447656:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.446105Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488671284447657:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.446152Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.451550Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:54.463789Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608488671284447660:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:54.534744Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608488671284447711:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |92.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> KqpNotNullColumns::UpdateNotNullPkPg [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNull >> DataShardWrite::WriteCommitVersion [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::LMapFunction [GOOD] Test command err: 2026-02-19T08:21:36.244037Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488593887774627:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:36.244322Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:36.616442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:36.648574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:36.650300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:36.737318Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488593887774516:2081] 1771489296223577 != 1771489296223580 2026-02-19T08:21:36.741778Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:36.750794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:36.949019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:36.978463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-02-19T08:21:36.978491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:36.978501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:36.978601Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:37.243166Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:37.507363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:37.514381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:37.579832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:37.761704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:37.974020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:38.156908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.136668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488611067645579:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.136788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.137258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488611067645589:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.137306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.509574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.549483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.609743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.641729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.720140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.770860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.830829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.888614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.967260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488611067646464:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.967311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.967681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488611067646469:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.967697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488611067646470:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.967720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.971003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:40.984790Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488611067646473:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:41.083917Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488615362613820:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:41.237877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488593887774627:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:41.237946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:42.748905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... rd__operation_create_table.cpp:689) 2026-02-19T08:21:49.408997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.485735Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:49.604124Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488647421725592:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.604217Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.604516Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488647421725597:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.604567Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488647421725598:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.604653Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:49.609110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:49.637437Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608488647421725601:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:49.705006Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608488647421725652:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:49.737854Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608488625946886555:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:49.737928Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:52.388609Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489312418, txId: 281474976710673] shutting down 2026-02-19T08:21:52.652966Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489312677, txId: 281474976710675] shutting down 2026-02-19T08:21:54.033921Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:54.034127Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:54.110032Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608488667335836604:2081] 1771489313907616 != 1771489313907619 2026-02-19T08:21:54.154336Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:54.156077Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:54.156153Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:54.209543Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:54.269822Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:54.394485Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:54.394511Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:54.394521Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:54.394636Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:54.982121Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:55.191815Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:58.809288Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488688810673979:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.809505Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.810931Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488688810673998:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.810981Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.811173Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488688810674000:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.811201Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.817105Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.865314Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488688810674083:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.865430Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.865518Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488688810674088:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.865666Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488688810674090:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.865743Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.870399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:58.883027Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608488688810674091:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:21:58.967796Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608488688810674143:2599] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:59.320446Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489319355, txId: 281474976710662] shutting down [[[2];[1000];["Dogecoin"]];[[4];[1];["XTC"]];[[5];[2];["Cardano"]];[[6];[3];["Tether"]]] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD] Test command err: 2026-02-19T08:21:18.707009Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488515109919395:2182];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:18.707074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:18.738795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:19.068065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:19.068182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:19.069448Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488515109919237:2081] 1771489278690036 != 1771489278690039 2026-02-19T08:21:19.082818Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:19.086773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:19.088073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:19.225233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:19.225250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:19.225696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:19.225861Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:19.313780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:21:19.679368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:19.699699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:19.708305Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:19.764243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:19.908853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:20.098014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:20.165785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.314644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488532289790300:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:22.314809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:22.315212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488532289790310:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:22.315267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:22.653682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.702773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.737132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.777225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.846534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.884629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.927247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.975660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.079402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488536584758480:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.079488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.079878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488536584758485:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.079944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488536584758486:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.080061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.085411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:23.100969Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488536584758489:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:23.204343Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488536584758540:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:23.710998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488515109919395:2182];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:23.711312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... ected 2026-02-19T08:21:48.935451Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:49.006390Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:49.006417Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:49.006432Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:49.006524Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:49.525868Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:49.824812Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:49.835252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:49.856908Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.020357Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.266760Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:50.404071Z node 4 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:53.505833Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608488647330532831:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:53.505910Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:54.053877Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488673100338446:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.053982Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.057870Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488673100338456:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.057973Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.139954Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.200338Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.251525Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.300026Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.345480Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.415374Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.462382Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.532530Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.644219Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488673100339326:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.644310Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.644607Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488673100339331:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.644656Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488673100339332:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.644773Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.651393Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:54.671044Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608488673100339335:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:54.753929Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608488673100339386:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:57.859026Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.202405Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.578885Z node 4 :KQP_EXECUTER WARN: {KQPDATA@kqp_data_executer.cpp:2075} ActorId: [4:7608488694575178182:2690] TxId: 281474976710676. Ctx: { TraceId: 01khtfvp6q35xgtqbv7pwy8zze, Database: /Root, SessionId: ydb://session/3?node_id=4&id=MWEwNWNlZDQtNmNiODU3NWQtZGUwOGE1MDktZTgzYjIzNTU=, PoolId: default, IsStreamingQuery: 0}. Too many affected shards datashard_tasks# 24 limit# 23 trace_id# 2026-02-19T08:21:59.579317Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=MWEwNWNlZDQtNmNiODU3NWQtZGUwOGE1MDktZTgzYjIzNTU=, ActorId: [4:7608488690280210727:2690], ActorState: ExecuteState, LegacyTraceId: 01khtfvp6q35xgtqbv7pwy8zze, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Affected too many shards: 0" issue_code: 2029 severity: 1 } trace_id#
: Error: Affected too many shards: 0, code: 2029 |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |92.8%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> KqpResultSetFormats::ArrowFormat_Types_Time [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 >> DataStreams::TestPutRecordsOfAnauthorizedUser >> KqpScan::Like [GOOD] >> DataStreams::TestGetShardIterator >> DataStreams::TestUpdateStream >> DataStreams::TestDeleteStream >> DataStreams::TestControlPlaneAndMeteringData >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> KqpQuery::TableSinkWithSubquery >> DataStreams::TestGetRecordsStreamWithSingleShard |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> DataStreams::TestUpdateStorage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Like [GOOD] Test command err: 2026-02-19T08:21:35.484655Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488587299332902:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:35.485295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:35.823343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:35.823456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:35.873591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:35.902601Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488587299332860:2081] 1771489295481453 != 1771489295481456 2026-02-19T08:21:35.929261Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:35.934510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:36.057543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:36.057565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:36.057571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:36.057644Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:36.161894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:36.506099Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:36.565344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:36.586422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:36.657074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:36.872127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:37.087975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:37.191722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:39.483025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488604479203925:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:39.492912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:39.514657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488604479203935:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:39.514762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.101494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.158465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.224239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.298303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.348315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.389061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.428980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.486886Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488587299332902:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:40.486938Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:40.490992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:40.635002Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488608774172107:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.635126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.635547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488608774172112:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.635584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488608774172113:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.635708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:40.639121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:40.655230Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488608774172116:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:40.749467Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488608774172167:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:42.746680Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489302779, txId: 281474976710673] shutting down 2026-02-1 ... 7307199536658146131:7762515]; 2026-02-19T08:21:54.625966Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:54.666216Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:54.745797Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:54.881743Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:54.967168Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:54.967243Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:54.968607Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:55.026689Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:55.075403Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:55.075426Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:55.075434Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:55.075516Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:55.626456Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:55.663336Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:55.677231Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:55.767096Z node 3 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:55.929068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.207928Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.498235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.361866Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488694409467434:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:59.361982Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:59.366228Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488694409467444:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:59.366352Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:59.436273Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.486337Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.537774Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.588472Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.630274Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608488672934629283:2211];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:59.630549Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:59.640437Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.688033Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.771853Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:59.874000Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:00.034907Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488698704435615:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.035055Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.037895Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488698704435620:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.037997Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608488698704435621:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.038239Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.042539Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:00.057295Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608488698704435624:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:22:00.113204Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608488698704435675:3759] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:02.034600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit |92.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag >> KqpLimits::DatashardReplySize [GOOD] >> KqpLimits::ManyPartitions >> TKeyValueTest::TestRewriteThenLastValue |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> DataShardWrite::DistributedInsertWithoutLocks+Volatile [GOOD] >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit [GOOD] >> DataStreams::TestGetShardIterator [GOOD] >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> KqpResultSetFormats::ValueFormat_Simple [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> KqpNotNullColumns::UpdateTable_DontChangeNotNull [GOOD] |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> DataShardWrite::DistributedInsertWithoutLocks-Volatile >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestUpdateStream [GOOD] >> KqpQuery::TableSinkWithSubquery [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 [GOOD] >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> GroupWriteTest::Simple [GOOD] |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |92.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |92.8%| [LD] {RESULT} 
$(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> DataStreams::TestUpdateStorage [GOOD] >> KqpPg::TableDeleteWhere+useSink [GOOD] >> KqpPg::TableDeleteWhere-useSink >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] >> KqpLimits::ManyPartitions [GOOD] >> DataShardOutOfOrder::UncommittedReads >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] >> Cache::Test1 [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpScanArrowFormat::SingleKey >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap >> DataStreams::Test_AutoPartitioning_Describe >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 >> DataStreams::TestStreamTimeRetention >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize >> KqpQueryDiscard::DiscardSelectMultiLine >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize [GOOD] >> DataStreams::TestPutRecordsWithRead >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] >> KqpScanArrowFormat::SingleKey [GOOD] >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataShardWrite::LocksBrokenStats >> KqpAnalyze::AnalyzeError >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> DataStreams::ChangeBetweenRetentionModes >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink >> KqpLimits::ManyPartitionsSorting >> DataShardOutOfOrder::UncommittedReads [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 17476639660609417693 >> DataShardWrite::LocksBrokenStats [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test3 [GOOD] >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic >> DataShardWrite::TliLocksBrokenByWrite >> KqpScanArrowFormat::JoinWithParams >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex [GOOD] >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 >> KqpResultSetFormats::ArrowFormat_LargeTable >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure >> 
KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic [GOOD] >> DataStreams::TestCreateExistingStream >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 12323514179475946022 2026-02-19T08:21:46.163361Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-02-19T08:21:46.187943Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-02-19T08:21:46.188011Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-02-19T08:21:46.190682Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 received TEvBlockResult {Status# OK} 2026-02-19T08:21:46.205425Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:21:46.208137Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 received TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-02-19T08:22:11.749390Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2026-02-19T08:22:11.749492Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:22:11.812411Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 received TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> KqpNotNullColumns::UpdateTable_Immediate >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always >> KqpResultSetFormats::ArrowFormat_Types_Binary >> DataStreams::TestStreamTimeRetention [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD] >> PhantomBlobs::TestEnabling [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestCreateExistingStream [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile [GOOD] >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestUnsupported >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> DataStreams::ListStreamsValidation >> 
DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile >> DataStreams::TestPutRecords ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:20:59.793534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:20:59.793635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:59.793768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:20:59.793827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:20:59.793869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:20:59.793909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:20:59.793940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:20:59.793981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:20:59.794051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:20:59.794118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:20:59.795073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:20:59.795386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:20:59.941173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:20:59.941273Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:59.942270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2026-02-19T08:20:59.961090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:20:59.961880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:20:59.962072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:20:59.969397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:20:59.969674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:20:59.970516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:20:59.970782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:20:59.973064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:59.973243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:20:59.974597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:20:59.974661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:20:59.974921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:20:59.974982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:20:59.975028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:20:59.975136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:21:00.137621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.138728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.138923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:21:00.139920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: ... 83647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "3_continuousBackupImpl" TopicPath: "/MyRoot/Table/3_continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409555 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409556 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 50 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:22:16.725967Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 
72057594046678944 2026-02-19T08:22:16.726219Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl1" took 279us result status StatusSuccess 2026-02-19T08:22:16.726728Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl1" PathDescription { Self { Name: "IncrBackupImpl1" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:22:16.727940Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:22:16.728176Z node 16 
:SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl2" took 266us result status StatusSuccess 2026-02-19T08:22:16.728673Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl2" PathDescription { Self { Name: "IncrBackupImpl2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000042 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:22:16.729931Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:22:16.730175Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 
72057594046678944 describe path "/MyRoot/IncrBackupImpl3" took 250us result status StatusSuccess 2026-02-19T08:22:16.730685Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl3" PathDescription { Self { Name: "IncrBackupImpl3" PathId: 49 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 49 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] >> KqpNotNullColumns::UpdateOnNotNull |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn >> GroupWriteTest::ByTableName [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> KqpResultSetFormats::ArrowFormat_Types_Binary [GOOD] 
>> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always [GOOD] >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 >> DataStreams::TestUnsupported [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DataStreams::ListStreamsValidation [GOOD] >> DataStreams::TestPutRecords [GOOD] >> DataShardWrite::TliLocksBrokenByWrite [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable [GOOD] >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] >> KqpNotNullColumns::UpdateOnNotNull [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_1 >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table >> BasicUsage::SimpleBlockingKeyedWriteSession_NoSeqNo >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> KqpScanArrowInChanels::AggregateCountStar >> DataShardWrite::ImmediateWriteVolatileTxIdOnPageFault >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> KqpNotNullColumns::UpdateOnNotNullPg >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |92.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |92.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2026-02-19T08:22:04.735473Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488712550080455:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:04.738624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:04.816975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:05.664494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:05.739187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:05.744874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-02-19T08:22:05.996459Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:06.009081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:06.067567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:06.098727Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:06.234137Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.010705s 2026-02-19T08:22:06.251811Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.005314s 2026-02-19T08:22:06.526431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:06.526454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:06.526460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:06.526546Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:06.541796Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:07.028023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:07.137395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:07.338722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:07.362909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-02-19T08:22:11.450315Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488743527046693:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:11.450366Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-02-19T08:22:11.557897Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:11.733678Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:11.765059Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:11.779655Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:11.779744Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:11.809859Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:11.978576Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.023159s 2026-02-19T08:22:11.915394Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005174s 2026-02-19T08:22:11.941775Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.012057s 2026-02-19T08:22:12.103657Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:12.103675Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:12.103681Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:12.103744Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:12.265591Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:12.432609Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:12.465413Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:12.563256Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:12.800812Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 
2026-02-19T08:22:13.029294Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:13.064933Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7608488752116982898:3217], for# user2@builtin, access# DescribeSchema 2026-02-19T08:22:13.075734Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7608488752116982901:3218], for# user2@builtin, access# DescribeSchema 2026-02-19T08:22:13.090094Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:17.008662Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488771530351585:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:17.009317Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:17.045483Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:17.139467Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:17.158043Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:17.158123Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:17.185064Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:17.261532Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:17.261553Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:17.261560Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:17.261634Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:17.331084Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:17.480608Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:17.537316Z node 7 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:17.740509Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72)
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 2026-02-19T08:22:18.042177Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSorting [GOOD] Test command err: 2026-02-19T08:21:26.005224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488551177625658:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:26.005305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:26.259948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:26.295309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:26.295427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:26.314251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:26.412340Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:26.419241Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488546882658331:2081] 1771489286003414 != 1771489286003417 2026-02-19T08:21:26.665992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:26.762375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:26.762397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:26.762407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:26.762566Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:27.025427Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:27.303687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:27.314285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:27.366311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:27.518699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:27.689328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:27.778980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:29.991484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488564062529393:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:29.991586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:29.992278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488564062529403:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:29.992337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.385309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.417579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.458476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.530607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.558798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.611747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.695870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.751465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.841304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488568357497573:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.841410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.841870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488568357497578:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.841933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488568357497579:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.842041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.846926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:30.859723Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488568357497582:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:30.936669Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488568357497633:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:31.005809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488551177625658:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:31.005877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:32.606947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... ],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"WaitOutputTimeUs\":{\"Count\":4,\"Sum\":5618,\"Max\":1460,\"Min\":1351,\"History\":[22,1361,23,2712,24,4172,51,4172,56,5618,68,5618]},\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ReadBytes\":{\"Count\":4,\"Sum\":8800,\"Max\":2208,\"Min\":2192}}],\"OutputRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"PhysicalStageId\":0,\"FinishedTasks\":4,\"Introspections\":[\"4 tasks from DSScanMinimalThreads 
setting\"],\"IngressRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"DurationUs\":{\"Count\":4,\"Sum\":203000,\"Max\":63000,\"Min\":45000},\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":4194304,\"Max\":1048576,\"Min\":1048576,\"History\":[1,4194304,68,4194304]},\"BaseTimeMs\":1771489339416,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":208,\"Max\":67,\"Min\":46},\"ActiveMessageMs\":{\"Count\":4,\"Max\":67,\"Min\":3},\"FirstMessageMs\":{\"Count\":4,\"Sum\":14,\"Max\":4,\"Min\":3},\"Bytes\":{\"Count\":4,\"Sum\":8268,\"Max\":2100,\"Min\":2029,\"History\":[22,947,23,1031,24,1115,43,1955,45,1955,51,6239,56,6491,68,8268]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":194,\"Max\":63,\"Min\":43}},\"LocalBytes\":8268,\"Name\":\"4\",\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":205,\"Max\":67,\"Min\":45},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":3,\"Sum\":160,\"Max\":67,\"Min\":46},\"FirstMessageMs\":{\"Count\":4,\"Sum\":14,\"Max\":4,\"Min\":3},\"ActiveMessageMs\":{\"Count\":4,\"Max\":67,\"Min\":3},\"Bytes\":{\"Count\":4,\"Sum\":8268,\"Max\":2100,\"Min\":2029,\"History\":[22,947,23,1871,24,2879,43,3803,45,5651,51,6239,56,6575,68,8268]},\"PauseMessageMs\":{\"Count\":3,\"Sum\":98,\"Max\":50,\"Min\":1},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":191,\"Max\":63,\"Min\":41},\"WaitTimeUs\":{\"Count\":3,\"Sum\":59598,\"Max\":41862,\"Min\":442,\"History\":[22,17271,43,38542,51,42304,56,49279,68,59598]},\"WaitPeriods\":{\"Count\":4,\"Sum\":32,\"Max\":15,\"Min\":1},\"WaitMessageMs\":{\"Count\":3,\"Max\":67,\"Min\":1}}}],\"CpuTimeUs\":{\"Count\":4,\"Sum\":10368,\"Max\":2869,\"Min\":2267,\"History\":[1,1442,22,2556,23,3401,24,4374,43,5290,45,7405,51,7993,56,8310,68,10368]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":205,\"Max\":67,\"Min\":45},\"ActiveMessageMs\":{\"Count\":4,\"Max\":67,\"Min\":3},\"FirstMessageMs\":{\"Count\":4,\"Sum\":13,\"Max\":4,\"Min\":3},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[22,4256,23,8128,24,12352,43,16224,45,23968,51,26432,56,27840,68,35200]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":192,\"Max\":63,\"Min\":42}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":205,\"Max\":67,\"Min\":45},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":205,\"Max\":67,\"Min\":45},\"FirstMessageMs\":{\"Count\":4,\"Sum\":13,\"Max\":4,\"Min\":3},\"ActiveMessageMs\":{\"Count\":4,\"Max\":67,\"Min\":3},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[22,4256,23,8128,24,12352,43,16224,45,23968,51,26432,56,27840,68,35200]},\"PauseMessageMs\":{\"Count\":4,\"Sum\":8,\"Max\":2,\"Min\":2},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":192,\"Max\":63,\"Min\":42},\"WaitTimeUs\":{\"Count\":4,\"Sum\":196512,\"Max\":64822,\"Min\":42420,\"History\":[22,18784,23,38370,24,58174,43,79650,45,121658,51,131690,56,184626,68,196512]},\"WaitPeriods\":{\"Count\":4,\"Sum\":10,\"Max\":4,\"Min\":1},\"WaitMessageMs\":{\"Count\":4,\"Max\":67,\"Min\":2}}}],\"StageDurationUs\":64000,\"WaitInputTimeUs\":{\"Count\":4,\"Sum\":72718,\"Max\":55468,\"Min\":4539
,\"History\":[22,3154,23,8685,24,12100,43,14825,45,16752,51,17250,56,67797,68,72718]},\"OutputBytes\":{\"Count\":4,\"Sum\":8268,\"Max\":2100,\"Min\":2029},\"UpdateTimeMs\":68,\"Tasks\":4}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":8268,\"Max\":8268,\"Min\":8268},\"Introspections\":[\"1 minimum tasks for compute\"],\"DurationUs\":{\"Count\":1,\"Sum\":65000,\"Max\":65000,\"Min\":65000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576,68,1048576]},\"BaseTimeMs\":1771489339416,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":48,\"Max\":48,\"Min\":48},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":68,\"Max\":68,\"Min\":68},\"ActiveMessageMs\":{\"Count\":1,\"Max\":68,\"Min\":5},\"FirstMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"Bytes\":{\"Count\":1,\"Sum\":7906,\"Max\":7906,\"Min\":7906,\"History\":[24,947,43,1787,62,7053,68,7906]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":63,\"Max\":63,\"Min\":63}},\"LocalBytes\":7906,\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":68,\"Max\":68,\"Min\":68},\"Chunks\":{\"Count\":1,\"Sum\":48,\"Max\":48,\"Min\":48},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":68,\"Max\":68,\"Min\":68},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ActiveMessageMs\":{\"Count\":1,\"Max\":68,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":7906,\"Max\":7906,\"Min\":7906,\"History\":[24,1031,43,1787,62,7137,68,7906]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"WaitTimeUs\":{\"Count\":1,\"Sum\":44605,\"Max\":44605,\"Min\":44605,\"History\":[24,14672,43,25462,62,40376,68,44605]},\"WaitPeriods\":{\"Count\":1,\"Sum\":46,\"Max\":46,\"Min\":46},\"WaitMessageMs\":{\"Count\":1,\"Max\":68,\"Min\":1}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":7662,\"Max\":7662,\"Min\":7662,\"History\":[2,373,24,1883,43,2999,62,6692,68,7662]},\"StageDurationUs\":65000,\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResultBytes\":{\"Count\":1,\"Sum\":7906,\"Max\":7906,\"Min\":7906},\"OutputBytes\":{\"Count\":1,\"Sum\":7906,\"Max\":7906,\"Min\":7906},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":67,\"Max\":67,\"Min\":67},\"ActiveMessageMs\":{\"Count\":1,\"Max\":67,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8268,\"Max\":8268,\"Min\":8268,\"History\":[24,1283,43,2039,62,7499,68,8268]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64}},\"LocalBytes\":8268,\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":67,\"Max\":67,\"Min\":67},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":67,\"Max\":67,\"Min\":67},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"ActiveMessageMs\":{\"Count\":1,\"Max\":67,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":8268,\"Max\":8268,\"Min\":8268,\"History\":[24,3299,43,5819,62,7499,68,8
268]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"WaitTimeUs\":{\"Count\":1,\"Sum\":14899,\"Max\":14899,\"Min\":14899,\"History\":[24,4639,43,9570,62,13696,68,14899]},\"WaitPeriods\":{\"Count\":1,\"Sum\":32,\"Max\":32,\"Min\":32},\"WaitMessageMs\":{\"Count\":1,\"Max\":67,\"Min\":1}}}],\"UpdateTimeMs\":68,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":175785,\"CpuTimeUs\":168436},\"ProcessCpuTimeUs\":336,\"TotalDurationUs\":331011,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":74878},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:38\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'368) \'(\'\"_id\" \'\"bc644807-18463f8f-77c8f0e3-8da9cd35\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'380) \'(\'\"_id\" \'\"2d5a79f8-9a6f5831-563061db-143c9d99\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 331011 total_cpu_time_us: 186802 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":38},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1771489339\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"a26fb437-318e6122-6f66bcf-81c1ca26\",\"version\":\"1.0\"}" >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement >> KqpResultSetFormats::ArrowFormat_Types_List_1 [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> DataShardWrite::ImmediateWriteVolatileTxIdOnPageFault [GOOD] >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink >> KqpResultSetFormats::ArrowFormat_Returning+isOlap >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 3841105172014120897 2026-02-19T08:21:45.669685Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-02-19T08:21:45.694709Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-02-19T08:21:45.694793Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-02-19T08:21:45.697797Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 received TEvBlockResult {Status# OK} 2026-02-19T08:21:45.711434Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:21:45.714888Z 1 00h01m00.010512s 
:BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-02-19T08:22:24.271894Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2026-02-19T08:22:24.271984Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:22:24.321193Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 received TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> KqpScanArrowInChanels::AllTypesColumns >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] >> BasicUsage::SimpleBlockingKeyedWriteSession_NoSeqNo [GOOD] >> KqpScanArrowFormat::AggregateCountStar ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/unittest >> PhantomBlobs::TestEnabling [GOOD] Test command err: RandomSeed# 14118783750296309797 Write blobs Set Keep flags Wait for sync Stop dead nodes Stop node# 1 2026-02-19T08:14:54.711469Z 2 00h31m00.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 1 Wait for sync Set DoNotKeepFlags on first half of blobs Wait for sync Write unsynced blobs batch, blobs written# 0 Write unsynced blobs batch, blobs written# 1000 Write unsynced blobs batch, blobs written# 2000 Write unsynced blobs batch, blobs written# 3000 Write unsynced blobs batch, blobs written# 4000 Write unsynced blobs batch, blobs written# 5000 Write unsynced blobs batch, blobs written# 6000 Write unsynced blobs batch, blobs written# 7000 Write unsynced blobs batch, blobs written# 8000 Write unsynced blobs batch, blobs written# 9000 Force syncLog trim Checking memory consumption: Consumed# 0 Limit# 82944 Checking memory consumption: Consumed# 0 Limit# 82944 Set DoNotKeepFlags on second half of blobs Wait for sync Restart nodes Start dead nodes Start node# 1 Wait for sync Move soft barrier Wait for sync Group status# TEvStatusResult {Status# OK StatusFlags# { }} Write blobs Set Keep flags Wait for sync Stop dead nodes Stop node# 1 2026-02-19T08:18:42.882618Z 2 00h31m00.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 1 Wait for sync Set DoNotKeepFlags on first half of blobs Wait for sync Write unsynced blobs batch, blobs written# 0 Write unsynced blobs batch, blobs written# 1000 Write unsynced blobs batch, blobs written# 2000 Write unsynced blobs batch, blobs written# 3000 Write unsynced blobs batch, blobs written# 4000 Write unsynced blobs batch, blobs written# 5000 Write unsynced blobs batch, blobs written# 6000 Write unsynced blobs batch, blobs written# 7000 Write unsynced blobs batch, blobs written# 8000 Write unsynced blobs batch, blobs written# 9000 Force syncLog trim Checking memory consumption: Consumed# 0 Limit# 82944 Checking memory consumption: Consumed# 0 Limit# 82944 Set DoNotKeepFlags on second half of blobs Wait for sync Restart nodes Start dead nodes Start node# 1 Wait for sync 
Move soft barrier Wait for sync Group status# TEvStatusResult {Status# OK StatusFlags# { }} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2026-02-19T08:22:04.245755Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.005885s 2026-02-19T08:22:04.539239Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.006929s 2026-02-19T08:22:04.544986Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488715285705210:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:04.545089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:05.453988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:05.567019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:05.567103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:05.705536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:05.914387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:06.057296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:06.094275Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:06.214877Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008733s 2026-02-19T08:22:06.213825Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.017700s 2026-02-19T08:22:06.430243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:06.430263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:06.430273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:06.430337Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:06.892488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:07.038906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, 
opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:07.359085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:11.499112Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488744646007763:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:11.499617Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:11.604641Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:11.786012Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:11.827150Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:11.827226Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:11.853484Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:11.885589Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:12.098566Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:12.098588Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:12.098598Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:12.098662Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:12.377555Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:12.506415Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:12.510194Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:12.733804Z node 
4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:12.994551Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:13.074748Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2026-02-19T08:22:13.199357Z :INFO: [/Root/] [/Root/] [f6a15c9c-1e042695-20320d01-8363e4af] Starting read session 2026-02-19T08:22:13.199415Z :DEBUG: [/Root/] [/Root/] [f6a15c9c-1e042695-20320d01-8363e4af] Starting session to cluster null (localhost:32074) 2026-02-19T08:22:13.205794Z :DEBUG: [/Root/] [/Root/] [f6a15c9c-1e042695-20320d01-8363e4af] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:22:13.205846Z :DEBUG: [/Root/] [/Root/] [f6a15c9c-1e042695-20320d01-8363e4af] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:22:13.205877Z :DEBUG: [/Root/] 
[/Root/] [f6a15c9c-1e042695-20320d01-8363e4af] [null] Reconnecting session to cluster null in 0.000000s 2026-02-19T08:22:13.237107Z :DEBUG: [/Root/] [/Root/] [f6a15c9c-1e042695-20320d01-8363e4af] [null] Successfully connected. Initializing session 2026-02-19T08:22:13.238463Z node 4 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:109: new grpc connection 2026-02-19T08:22:13.238487Z node 4 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:131: new session created cookie 1 2026-02-19T08:22:13.267728Z node 4 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2026-02-19T08:22:13.268154Z node 4 :PQ_READ_PROXY INFO: read_session_actor.cpp:944: session cookie 1 consumer user1 session user1_4_1_16218102263266949934_v1 read init: from# ipv6:[::1]:55912, request# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2026-02-19T08:22:13.268328Z node 4 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer user1 session user1_4_1_16218102263266949934_v1 auth for : user1 2026-02-19T08:22:13.274349Z node 4 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:142: session cookie 1 ... lientReadOffset 0 2026-02-19T08:22:21.912377Z :DEBUG: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:22:21.912589Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2026-02-19T08:22:21.912599Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2026-02-19T08:22:21.912776Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2026-02-19T08:22:21.912839Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2026-02-19T08:22:21.914154Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2026-02-19T08:22:21.914191Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {3, 0} (3-3) 2026-02-19T08:22:21.914252Z :DEBUG: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2026-02-19T08:22:21.986862Z :DEBUG: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:22:21.987010Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2026-02-19T08:22:21.987020Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2026-02-19T08:22:21.987104Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-1) 2026-02-19T08:22:21.987113Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-02-19T08:22:21.987502Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2026-02-19T08:22:21.987514Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2026-02-19T08:22:21.987544Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2026-02-19T08:22:21.987559Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2026-02-19T08:22:21.987578Z :DEBUG: [/Root/] Decompression task done. 
Partition/PartitionSessionId: 1 (7-7) 2026-02-19T08:22:21.987607Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2026-02-19T08:22:21.987965Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-02-19T08:22:21.988010Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-02-19T08:22:21.988045Z :DEBUG: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2026-02-19T08:22:21.988207Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2026-02-19T08:22:21.988237Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2026-02-19T08:22:21.988256Z :DEBUG: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2026-02-19T08:22:21.988368Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2026-02-19T08:22:21.989362Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2026-02-19T08:22:21.991488Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2026-02-19T08:22:21.992855Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2026-02-19T08:22:22.000249Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2026-02-19T08:22:22.001162Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2026-02-19T08:22:22.002649Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2026-02-19T08:22:22.003718Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2026-02-19T08:22:22.014128Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2026-02-19T08:22:22.014214Z :DEBUG: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] [null] The application data is transferred to the client. Number of messages 9, size 8388611 bytes 2026-02-19T08:22:22.017126Z :INFO: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] Closing read session. Close timeout: 0.000000s 2026-02-19T08:22:22.017215Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:0:5:1:0 null:stream_TestPutRecordsCornerCases:1:4:8:0 null:stream_TestPutRecordsCornerCases:2:3:0:0 null:stream_TestPutRecordsCornerCases:4:2:1:0 null:stream_TestPutRecordsCornerCases:3:1:3:0 2026-02-19T08:22:22.017279Z :INFO: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 143 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:22:22.017411Z :NOTICE: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:22:22.017470Z :DEBUG: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] [null] Abort session to cluster 2026-02-19T08:22:22.018521Z :NOTICE: [/Root/] [/Root/] [9d0a3b82-346299b8-e4a1d0f8-7960f84f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:22:22.018664Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer user1 session user1_7_1_5976826700856190026_v1 grpc read failed 2026-02-19T08:22:22.018711Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer user1 session user1_7_1_5976826700856190026_v1 grpc closed 2026-02-19T08:22:22.018760Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer user1 session user1_7_1_5976826700856190026_v1 is DEAD 2026-02-19T08:22:23.606566Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608488795336487812:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:23.606629Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:23.623357Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:23.715916Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:23.735087Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:23.735178Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:23.750829Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:23.829296Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:23.829315Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:23.829322Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:23.829384Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:23.883442Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:24.109939Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:24.220741Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:24.422307Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, 
opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:24.612415Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:24.683745Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101)
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2026-02-19T08:22:24.798564Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2026-02-19T08:22:24.629359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:24.629436Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:24.632201Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:22:24.652345Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:22:24.652790Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:22:24.653062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:22:24.702474Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:22:24.708955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:22:24.709153Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:22:24.710805Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:22:24.710873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:22:24.710926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:22:24.711336Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:22:24.711415Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:22:24.711500Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:22:24.771924Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:22:24.801284Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:22:24.801463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:22:24.801541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:22:24.801576Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:22:24.801603Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:22:24.801630Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:24.801804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:24.801839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:24.802075Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:22:24.802167Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:22:24.802279Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:22:24.802308Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:22:24.802353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:22:24.802380Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:22:24.802413Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:22:24.802436Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:22:24.802468Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:24.802542Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:24.802579Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:24.802609Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:22:24.809573Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:22:24.809653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:22:24.809768Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:22:24.809946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:22:24.810014Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:22:24.810079Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:22:24.810118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:22:24.810151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:22:24.810186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:22:24.810218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:22:24.810513Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:22:24.810541Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:22:24.810568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:22:24.810596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:22:24.810639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:22:24.810657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:22:24.810681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:22:24.810701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on 
unit WaitForPlan 2026-02-19T08:22:24.810717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:22:24.822700Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:22:24.822791Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:22:24.822834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:22:24.822863Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:22:24.822926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:22:24.823354Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:24.823395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:24.823426Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:22:24.823543Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:22:24.823569Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:22:24.823689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:22:24.823732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:22:24.823762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:22:24.823794Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:22:24.827029Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:22:24.827105Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:24.827304Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:24.827345Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:24.827397Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:22:24.827435Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:22:24.827482Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:22:24.827523Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:22:24.827557Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 22:29.997946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompleteOperation 2026-02-19T08:22:29.998040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is DelayComplete 2026-02-19T08:22:29.998072Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompleteOperation 2026-02-19T08:22:29.998113Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:154] at 9437184 to execution unit CompletedOperations 2026-02-19T08:22:29.998143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompletedOperations 2026-02-19T08:22:29.998164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is Executed 2026-02-19T08:22:29.998177Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompletedOperations 2026-02-19T08:22:29.998190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:154] at 9437184 has finished 2026-02-19T08:22:29.998213Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:22:29.998233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:22:29.998259Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:22:29.998283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:22:30.017107Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:30.017158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-02-19T08:22:30.017210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:22:30.017273Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:22:30.017319Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:30.017463Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:22:30.017498Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:30.017522Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-02-19T08:22:30.017565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 152] from 9437186 at tablet 
9437186 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:22:30.017608Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:22:30.017631Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:30.018015Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:22:30.018076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:30.018113Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-02-19T08:22:30.018218Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:22:30.018245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:30.018281Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-02-19T08:22:30.018351Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:30.018380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2026-02-19T08:22:30.018419Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:30.018489Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-02-19T08:22:30.018513Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:30.018628Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:30.018651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2026-02-19T08:22:30.018683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:22:30.018708Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:30.018791Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:30.018813Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2026-02-19T08:22:30.018855Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 151] 
from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:30.018886Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-02-19T08:22:30.018902Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:30.018995Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:30.019030Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-02-19T08:22:30.019059Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:30.019092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:22:30.019113Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:30.019204Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:30.019223Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2026-02-19T08:22:30.019248Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:22:30.019268Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:30.019366Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:30.019395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-02-19T08:22:30.019440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:22:30.019486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:22:30.019508Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:30.019645Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:347:2313]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-02-19T08:22:30.019678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:30.019706Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-02-19T08:22:30.019833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: 
StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:347:2313]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-02-19T08:22:30.019857Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:30.019872Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-02-19T08:22:30.019918Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:347:2313]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:22:30.019939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:30.019964Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-02-19T08:22:30.020025Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:347:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:22:30.020053Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:30.020072Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: 2026-02-19T08:21:20.065051Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488526798461066:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:20.066226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:20.298479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:20.346624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:20.346729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:20.422077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:20.425367Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:20.427006Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488526798461024:2081] 1771489280052152 != 1771489280052155 2026-02-19T08:21:20.544147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:21:20.575435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:20.575458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:20.575467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:20.575660Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:21.095178Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:21.356155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:21.426551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:21.641803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:21.810107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:21.893667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.120164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488543978332090:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:24.120287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:24.120784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488543978332100:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:24.120857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:24.550858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.644007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.702957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.742063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.777850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.832849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.899447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.955190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:25.058259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488526798461066:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:25.058314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:25.090425Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488548273300267:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:25.090513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:25.090799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488548273300272:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:25.090829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488548273300273:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:25.090940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:25.095562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:25.121113Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488548273300276:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:25.204182Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488548273300330:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:26.937352Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=ZTkxNzExNTItN2E4ODdiMGQtMWFkNzQzMy01NGMxYzNlMQ==, ActorId: [1:7608488552568267930:2538], ActorState: ExecuteState, LegacyTraceId: 01khtftpq7aya8dw9ekjj4cg13, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 50ms exceeded" se ... 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:07.134355Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:22:07.155544Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:07.418211Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:07.927791Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:08.227838Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:08.896374Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1913:3519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:08.897223Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:08.898410Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1987:3538], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:08.898603Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:08.934505Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:09.149199Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:09.506229Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:09.789792Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:10.082139Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:10.363673Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:10.654130Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:11.033337Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:11.448879Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2799:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:11.449032Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:11.449509Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2803:4183], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:11.449602Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2806:4186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:11.449659Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:11.456331Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:11.657497Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:2808:4188], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:22:11.735945Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:2868:4229] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:14.454724Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.758033Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:15.101554Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:17.160004Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2118: SelfId: [5:3573:4774], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfw638fezbfcmzrat2rbb4. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2Q4MjNkMmEtZWU0YTRiYmEtYWZlMDE0ZmQtYjhiOGNmMWM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2026-02-19T08:22:17.160178Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [5:3573:4774], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfw638fezbfcmzrat2rbb4. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2Q4MjNkMmEtZWU0YTRiYmEtYWZlMDE0ZmQtYjhiOGNmMWM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2026-02-19T08:22:17.161279Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [5:3574:4775], TxId: 281474976715676, task: 2. Ctx: { CheckpointId : . TraceId : 01khtfw638fezbfcmzrat2rbb4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2Q4MjNkMmEtZWU0YTRiYmEtYWZlMDE0ZmQtYjhiOGNmMWM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:3567:4462], status: OVERLOADED, reason: {
: Error: Terminate execution } 2026-02-19T08:22:17.162282Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=5&id=Y2Q4MjNkMmEtZWU0YTRiYmEtYWZlMDE0ZmQtYjhiOGNmMWM=, ActorId: [5:3152:4462], ActorState: ExecuteState, LegacyTraceId: 01khtfw638fezbfcmzrat2rbb4, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Table \'/Root/SecondaryKeys\' retry limit exceeded." severity: 1 } trace_id# 2026-02-19T08:22:17.286201Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:22:17.286286Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:21:15.326422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:15.451770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:15.473424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:15.473515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:15.474182Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:15.809605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:15.809766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:15.887040Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489272393734 != 1771489272393738 2026-02-19T08:21:15.896504Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:15.953282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:16.052022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:16.343796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:16.357689Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:16.467318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:16.522155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:887:2768], Recipient [1:902:2778]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:21:16.523296Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:887:2768], Recipient [1:902:2778]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:21:16.523596Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2778] 2026-02-19T08:21:16.523840Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:21:16.534362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:887:2768], Recipient [1:902:2778]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:21:16.571192Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:890:2770], Recipient [1:907:2781]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:21:16.572437Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:21:16.572687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:21:16.574444Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:21:16.574540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 
2026-02-19T08:21:16.574596Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:21:16.574965Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:21:16.575062Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:890:2770], Recipient [1:907:2781]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:21:16.575331Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:907:2781] 2026-02-19T08:21:16.575543Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:21:16.584208Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:21:16.584345Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2778] in generation 1 2026-02-19T08:21:16.584557Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:890:2770], Recipient [1:907:2781]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:21:16.585409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:21:16.585530Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:21:16.587113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:21:16.587168Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:21:16.587209Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:21:16.587448Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:21:16.587599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:21:16.587703Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:935:2781] in generation 1 2026-02-19T08:21:16.598658Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:21:16.632547Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:21:16.632777Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:21:16.632900Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:938:2799] 2026-02-19T08:21:16.632966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:21:16.633010Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:21:16.633177Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:21:16.633495Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:902:2778], Recipient [1:902:2778]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:16.633568Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, 
processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:16.633732Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:21:16.633780Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:21:16.633866Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:21:16.633934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:939:2800] 2026-02-19T08:21:16.633963Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:21:16.634012Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:21:16.634046Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:21:16.634370Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:907:2781], Recipient [1:907:2781]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:16.634399Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:16.634565Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:21:16.634662Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:21:16.634738Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:21:16.634779Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:21:16.634815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:21:16.634844Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:21:16.634886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:21:16.634916Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:21:16.634956Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:21:16.635011Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:21:16.635052Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:21:16.635171Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:904:2779], Recipient [1:902:2778]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:16.635224Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:16.635282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# 
[1:896:2774], serverId# [1:904:2779], sessionId# [0:0:0] 2026-02-19T08:21:16.635345Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:21:16.635373Z node 1 :TX_DAT ... rk, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:28.039988Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2026-02-19T08:22:28.040137Z node 12 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=1234567890011; 2026-02-19T08:22:28.040315Z node 12 :TX_DATASHARD INFO: datashard.cpp:760: Complete volatile write [3000 : 1234567890011] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2026-02-19T08:22:28.040397Z node 12 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:22:28.040484Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [12:927:2789], Recipient [12:925:2787]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-02-19T08:22:28.040505Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:28.040527Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2026-02-19T08:22:28.181611Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [12:1170:2972], Recipient [12:925:2787]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-02-19T08:22:28.181841Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:22:28.181960Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-02-19T08:22:28.182079Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:22:28.182131Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:22:28.182182Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:28.182229Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:22:28.182286Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-02-19T08:22:28.182361Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 
2026-02-19T08:22:28.182391Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:28.182433Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:22:28.182459Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:22:28.182625Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-02-19T08:22:28.182934Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-02-19T08:22:28.183003Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[12:1170:2972], 0} after executionsCount# 1 2026-02-19T08:22:28.183078Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[12:1170:2972], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:22:28.183183Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[12:1170:2972], 0} finished in read 2026-02-19T08:22:28.183268Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:22:28.183301Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:22:28.183334Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:22:28.183360Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:22:28.183409Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:22:28.183435Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:22:28.183466Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-02-19T08:22:28.183522Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:22:28.183655Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:22:28.184487Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [12:1170:2972], Recipient [12:925:2787]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:22:28.184568Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-02-19T08:22:28.184817Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 
269553215, Sender [12:1170:2972], Recipient [12:927:2789]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2026-02-19T08:22:28.184911Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-02-19T08:22:28.184959Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-02-19T08:22:28.185016Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:22:28.185059Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-02-19T08:22:28.185086Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:28.185110Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-02-19T08:22:28.185147Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-02-19T08:22:28.185177Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:22:28.185199Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:28.185228Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-02-19T08:22:28.185258Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-02-19T08:22:28.185356Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2026-02-19T08:22:28.185525Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-02-19T08:22:28.185561Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037889 Complete read# {[12:1170:2972], 1} after executionsCount# 1 2026-02-19T08:22:28.185594Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037889 read iterator# {[12:1170:2972], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:22:28.185647Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037889 read iterator# {[12:1170:2972], 1} finished in read 2026-02-19T08:22:28.185688Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:22:28.185732Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit 
ExecuteRead 2026-02-19T08:22:28.185755Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-02-19T08:22:28.185793Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-02-19T08:22:28.185834Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:22:28.185858Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-02-19T08:22:28.185881Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-02-19T08:22:28.185906Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-02-19T08:22:28.185968Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-02-19T08:22:28.187201Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [12:1170:2972], Recipient [12:927:2789]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-02-19T08:22:28.187250Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 11 } items { int32_value: 1002 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2026-02-19T08:22:25.064534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:25.064598Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:25.065514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:22:25.079248Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:22:25.079618Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:22:25.079881Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:22:25.125681Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:22:25.133424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:22:25.133647Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:22:25.135456Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:22:25.135538Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:22:25.135608Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:22:25.136084Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:22:25.136172Z node 1 
:TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:22:25.136261Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:22:25.208527Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:22:25.239681Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:22:25.239913Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:22:25.240027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:22:25.240082Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:22:25.240138Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:22:25.240173Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:25.240340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:25.240390Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:25.240696Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:22:25.240790Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:22:25.240895Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:22:25.240936Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:22:25.240985Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:22:25.241024Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:22:25.241071Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:22:25.241118Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:22:25.241161Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:25.241257Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:25.241298Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:25.241338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:22:25.248155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: 
TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:22:25.248244Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:22:25.248345Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:22:25.248540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:22:25.248607Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:22:25.248678Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:22:25.248722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:22:25.248759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:22:25.248794Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:22:25.248826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:22:25.249119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:22:25.249153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:22:25.249214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:22:25.249255Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:22:25.249305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:22:25.249333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:22:25.249371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:22:25.249402Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:22:25.249426Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:22:25.261768Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:22:25.261849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:22:25.261919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:22:25.261964Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, 
propose latency: 1 ms, status: PREPARED 2026-02-19T08:22:25.262048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:22:25.262565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:25.262615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:25.262665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:22:25.262824Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:22:25.262861Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:22:25.263019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:22:25.263060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:22:25.263096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:22:25.263133Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:22:25.266996Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:22:25.267086Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:25.267319Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:25.267363Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:25.267423Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:22:25.267463Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:22:25.267500Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:22:25.267542Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:22:25.267587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:22:35.392944Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:35.392966Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2026-02-19T08:22:35.392996Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:22:35.393047Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-02-19T08:22:35.393077Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:35.393203Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:22:35.393228Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:22:35.393249Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:22:35.393276Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:35.393300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2026-02-19T08:22:35.393333Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:22:35.393371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-02-19T08:22:35.393394Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:35.393497Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:22:35.393516Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:35.393537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2026-02-19T08:22:35.393587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:22:35.393635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-02-19T08:22:35.393658Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:35.393774Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:35.393798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2026-02-19T08:22:35.393832Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 511] from 
9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:22:35.393877Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-02-19T08:22:35.393900Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:35.393991Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:35.394014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2026-02-19T08:22:35.394042Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:22:35.394098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-02-19T08:22:35.394131Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:35.394254Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:35.394290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:516] at 9437184 on unit FinishPropose 2026-02-19T08:22:35.394336Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-02-19T08:22:35.394414Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:35.394551Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:35.394575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2026-02-19T08:22:35.394611Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:35.394654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-02-19T08:22:35.394679Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:35.394780Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:35.394828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2026-02-19T08:22:35.394864Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:22:35.394904Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:35.395218Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-02-19T08:22:35.395261Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:35.395307Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2026-02-19T08:22:35.395426Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-02-19T08:22:35.395456Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:35.395489Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2026-02-19T08:22:35.395576Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-02-19T08:22:35.395604Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:35.395632Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2026-02-19T08:22:35.395781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-02-19T08:22:35.395821Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:35.395860Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2026-02-19T08:22:35.395962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-02-19T08:22:35.396001Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:35.396030Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2026-02-19T08:22:35.396139Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-02-19T08:22:35.396168Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:35.396193Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2026-02-19T08:22:35.396264Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-02-19T08:22:35.396291Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:35.396316Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2026-02-19T08:22:35.396447Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-02-19T08:22:35.396480Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:35.396504Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSinkWithSubquery [GOOD] Test command err: 2026-02-19T08:21:26.025796Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488548939170315:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:26.026422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:26.074897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:26.485293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:26.485425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:26.488566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:26.549876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:26.617873Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488548939170286:2081] 1771489286024286 != 1771489286024289 2026-02-19T08:21:26.631901Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:26.805827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:26.914475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:26.914498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:26.914505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:26.914577Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:27.039299Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:27.482205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:27.545140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:27.707773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:27.912624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:27.996406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.496445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488566119041350:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.496689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.497328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488566119041360:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.497393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.915558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.979726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.014259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.026162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488548939170315:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:31.026225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:31.093901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.132875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.181587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.232459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.302774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.417344Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488570414009532:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.417441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.417810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488570414009538:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.417834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488570414009537:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.417909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.435974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:31.449947Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488570414009541:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:31.535923Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488570414009593:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:38.119383Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488601443290039: ... FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.229971Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.259780Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608488677139324766:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:00.259869Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:00.289338Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:00.610859Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:00.930090Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488698614163452:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.930186Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.930696Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488698614163457:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.930745Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608488698614163458:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.930859Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:00.935519Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:00.947734Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608488698614163461:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-02-19T08:22:01.006788Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608488702909130808:3401] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:04.989682Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:05.000669Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:22:05.413832Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:05.449275Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:05.458000Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608488712868728205:2081] 1771489324903548 != 1771489324903551 2026-02-19T08:22:05.538496Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:05.538590Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:05.545606Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:05.742464Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:05.742487Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:05.742496Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:05.742583Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:05.796156Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:05.957855Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:06.751041Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:06.762447Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:22:09.629417Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488734343565580:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.629507Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.631172Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488734343565590:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.631262Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.682057Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:09.736721Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:09.848088Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488734343565753:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.848183Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.848456Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488734343565758:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.848502Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488734343565759:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.848606Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:09.854834Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:09.867318Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488734343565762:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-02-19T08:22:09.956500Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488734343565814:2644] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:10.304399Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037888 Cancelled read: {[5:7608488738638533168:2369], 0} >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query >> BasicUsage::SimpleBlockingKeyedWriteSession_ManyMessages >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2026-02-19T08:22:05.649782Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.107139s 2026-02-19T08:22:05.954486Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488718890985800:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:05.954539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:06.618843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:06.703132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:06.703231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:06.739044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:06.981792Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:06.999465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:07.057056Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:07.295017Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.022028s 2026-02-19T08:22:07.412661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:07.412688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:07.412703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:07.412784Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration 2026-02-19T08:22:07.768766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:07.931474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:08.219463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:08.551647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-02-19T08:22:08.671933Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-02-19T08:22:08.672001Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-02-19T08:22:08.672016Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-02-19T08:22:08.672032Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:22:12.856264Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488746487785574:2161];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:12.911515Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:12.923122Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:13.095198Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:13.107947Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:13.111445Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:13.111521Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:13.137562Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:13.237982Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:13.238007Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:13.238012Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:13.238085Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:13.486530Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:13.547086Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:13.689800Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:13.754584Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:13.780358Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-02-19T08:22:13.893850Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:14.080985Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:14.176026Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-02-19T08:22:14.202204Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2026-02-19T08:22:14.202232Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 
72075186224037891 not found 2026-02-19T08:22:14.202244Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2026-02-19T08:22:14.202258Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2026-02-19T08:22:17.698101Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488771886162452:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:17.698155Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:17.725999Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:17.815961Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:17.845986Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:17.846108Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:17.893345Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:17.923737Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:18.033798Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.014375s 2026-02-19T08:22:18.107536Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:18.107560Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:18.107567Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:18.107644Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:18.456964Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:18.572274Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:18.703178Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:18.794450Z node 7 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:18.809865Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-02-19T08:22:18.963872Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:19.021470Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:19.060757Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-02-19T08:22:19.078713Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-02-19T08:22:19.078748Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2026-02-19T08:22:19.078760Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-02-19T08:22:19.078774Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-02-19T08:22:22.755855Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608488791273757828:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:22.756513Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:22.775925Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:22.878325Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:22.896620Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:22.896721Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:22.922141Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:22.955577Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:22.975988Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:22.976025Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:22.976036Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:22.976310Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:23.237467Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:23.291331Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:23.507876Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:23.755951Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:27.754089Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7608488791273757828:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:27.754193Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::TliLocksBrokenByWrite [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:21:16.046949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:16.189452Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:16.213147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:16.213233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:16.213809Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:16.517205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:16.517347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:16.607294Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489273074301 != 1771489273074305 2026-02-19T08:21:16.618463Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:16.666773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:16.780774Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:17.107731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:17.123212Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:17.247064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.299047Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:21:17.300079Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:21:17.300357Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:21:17.300605Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:21:17.311070Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:21:17.351978Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:21:17.352103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:21:17.353855Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:21:17.354004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:21:17.354076Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:21:17.354469Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:21:17.354610Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:21:17.354695Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:21:17.365399Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:21:17.394333Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:21:17.394567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:21:17.394693Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:21:17.394738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:21:17.394779Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:21:17.394829Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:21:17.395126Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:17.395174Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:17.395540Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:21:17.395660Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:21:17.395733Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:21:17.395773Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:21:17.395816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:21:17.395855Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:21:17.395888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:21:17.395924Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:21:17.395977Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:21:17.396068Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:17.396112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:17.396158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:21:17.396296Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:21:17.396340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:21:17.396438Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:21:17.396712Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:21:17.396767Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:21:17.396865Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:21:17.396917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:21:17.396958Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:21:17.397004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:21:17.397040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:21:17.397362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:21:17.397401Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:21:17.397434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:21:17.397480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:21:17.397539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:21:17.397569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:21:17.397602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:21:17.397643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:21:17.397671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:21:17.399484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:21:17.399549Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:21:17.410487Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... d_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-02-19T08:22:20.259057Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:22:20.259084Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-02-19T08:22:20.259111Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:20.259137Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:22:20.259181Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/0 2026-02-19T08:22:20.259225Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-02-19T08:22:20.259256Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:22:20.259280Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:20.259303Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-02-19T08:22:20.259325Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-02-19T08:22:20.259350Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:22:20.259371Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-02-19T08:22:20.259392Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-02-19T08:22:20.259417Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-02-19T08:22:20.259444Z node 10 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:6] at 72075186224037888 2026-02-19T08:22:20.259544Z node 10 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-02-19T08:22:20.259604Z node 10 :TX_DATASHARD DEBUG: execute_write_unit.cpp:561: Skip empty write operation for [0:6] at 72075186224037888 2026-02-19T08:22:20.259682Z node 10 :TX_DATASHARD TRACE: execute_write_unit.cpp:110: add locks to result: 0 2026-02-19T08:22:20.259766Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:22:20.259819Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 
72075186224037888 executing on unit ExecuteWrite 2026-02-19T08:22:20.259878Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-02-19T08:22:20.259924Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:22:20.259959Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-02-19T08:22:20.259988Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-02-19T08:22:20.260015Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:22:20.260042Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:22:20.260083Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:22:20.260107Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:22:20.260137Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-02-19T08:22:20.260216Z node 10 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:22:20.260252Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:22:20.260288Z node 10 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-02-19T08:22:20.260362Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:22:20.261858Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [10:71:2118], Recipient [10:892:2770]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 10 Status: STATUS_NOT_FOUND 2026-02-19T08:22:20.262302Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [10:1114:2919], Recipient [10:892:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.262340Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.262393Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [10:1113:2918], serverId# [10:1114:2919], sessionId# [0:0:0] 2026-02-19T08:22:20.262603Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553169, Sender [10:1112:2917], Recipient [10:892:2770]: NKikimrTxDataShard.TEvGetInfoRequest 2026-02-19T08:22:20.263679Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [10:1117:2922], Recipient [10:892:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.263745Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.263809Z node 10 
:TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [10:1116:2921], serverId# [10:1117:2922], sessionId# [0:0:0] 2026-02-19T08:22:20.264010Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [10:1115:2920], Recipient [10:892:2770]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-02-19T08:22:20.264163Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:22:20.264241Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-02-19T08:22:20.264298Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2026-02-19T08:22:20.264376Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-02-19T08:22:20.264476Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:22:20.264524Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:22:20.264568Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:20.264621Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:22:20.264678Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-02-19T08:22:20.264733Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:22:20.264760Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:20.264787Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:22:20.264814Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:22:20.264921Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-02-19T08:22:20.265161Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[10:1115:2920], 1000} after executionsCount# 1 2026-02-19T08:22:20.265242Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[10:1115:2920], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:22:20.265333Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read 
iterator# {[10:1115:2920], 1000} finished in read 2026-02-19T08:22:20.265391Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:22:20.265415Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:22:20.265436Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:22:20.265455Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:22:20.265489Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:22:20.265506Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:22:20.265529Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-02-19T08:22:20.265575Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:22:20.265686Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ... waiting for SysViewsRoster update finished ... waiting for SysViewsRoster update finished (done) { items { uint32_value: 1 } items { uint32_value: 100 } } >> KqpResultSetFormats::ArrowFormat_Types_List_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] Test command err: 2026-02-19T08:21:22.404682Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488533470989392:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:22.406581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:22.431481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:22.748672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:22.748822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:22.758536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:22.803932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:22.853159Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:22.854476Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488533470989359:2081] 
1771489282401480 != 1771489282401483 2026-02-19T08:21:22.973445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:22.973471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:22.973478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:22.973579Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:22.988273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:23.419690Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:23.677149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:23.728021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.896694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.050127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:24.109670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:26.114921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488550650860430:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:26.115050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:26.115409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488550650860440:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:26.115455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:26.614166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:26.665239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:26.729596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:26.786748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:26.851206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:26.917237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:26.972342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:27.016617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:27.117181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488554945828610:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:27.117285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:27.117633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488554945828616:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:27.117733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488554945828615:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:27.117857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:27.121908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:27.136587Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488554945828619:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:27.223156Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488554945828670:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:27.406159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488533470989392:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:27.406240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:28.925495Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488559240796271:2543], status: UNSUPPORTED, issues: ... Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:09.432387Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:09.522611Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:09.522636Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:09.522646Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:09.522730Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:09.805193Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:10.078206Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:10.089078Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:22:10.098762Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:10.105536Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:10.199667Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:10.439015Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:10.534976Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.023375Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488758429246231:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.023492Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.029929Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488758429246241:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.030075Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.095273Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608488736954408036:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:14.095389Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:14.145054Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.196457Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.295893Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.339658Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.380045Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.423560Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.466607Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.525101Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.622688Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488758429247116:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.622825Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.623134Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488758429247121:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.623147Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608488758429247122:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.623210Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.627755Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:14.641206Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608488758429247125:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:22:14.717540Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608488758429247176:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:21.044593Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608488788494018961:2692], status: GENERIC_ERROR, issues:
:1:0: Error: mismatched input 'sub' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES} 2026-02-19T08:22:21.045088Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=MWNiMWQwOTQtYWQ2YWI5YTUtYTM3ZGEwNy00YTQyMzhmMg==, ActorId: [5:7608488788494018959:2691], ActorState: ExecuteState, LegacyTraceId: 01khtfwbjqcx6vxzadfzg5sn2w, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 1 } message: "mismatched input \'sub\' expecting {\';\', \'(\', \'$\', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES}" end_position { row: 1 } severity: 1 } tx_id# trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2026-02-19T08:22:09.057253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:09.057331Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:09.059954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:22:09.074750Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:22:09.075137Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:22:09.075508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:22:09.121892Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:22:09.127706Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:22:09.127874Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:22:09.129214Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:22:09.129276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:22:09.129310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:22:09.129699Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:22:09.129787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:22:09.129850Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:22:09.215585Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:22:09.245981Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 
9437184 2026-02-19T08:22:09.246193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:22:09.246297Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:22:09.246359Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:22:09.246394Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:22:09.246429Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:09.246603Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:09.246654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:09.246992Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:22:09.247090Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:22:09.247272Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:22:09.247313Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:22:09.247370Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:22:09.247421Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:22:09.247465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:22:09.247498Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:22:09.247537Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:22:09.247622Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:09.247659Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:09.247698Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:22:09.254643Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:22:09.254760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:22:09.254913Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:22:09.255124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:22:09.255209Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:22:09.255286Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:22:09.255362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:22:09.255398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:22:09.255436Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:22:09.255469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:22:09.255808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:22:09.255852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:22:09.255899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:22:09.255930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:22:09.255995Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:22:09.256026Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:22:09.256060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:22:09.256090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:22:09.256115Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:22:09.274581Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:22:09.274694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:22:09.274763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:22:09.274808Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:22:09.274919Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:22:09.275640Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:09.275712Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, 
processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:09.275779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:22:09.275937Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:22:09.275974Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:22:09.276158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:22:09.276223Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:22:09.276281Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:22:09.276323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:22:09.280546Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:22:09.282072Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:22:09.282437Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:09.282512Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:09.282585Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:22:09.282628Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:22:09.282674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:22:09.282740Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:22:09.282788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
1:348:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:22:21.431448Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.431469Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-02-19T08:22:21.431608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:22:21.431637Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.431653Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-02-19T08:22:21.431710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:21.431732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2026-02-19T08:22:21.431763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:21.431824Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-02-19T08:22:21.431880Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:21.432061Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:22:21.432096Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:22:21.432119Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:21.432143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2026-02-19T08:22:21.432190Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:21.432253Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-02-19T08:22:21.432286Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:21.432391Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:22:21.432413Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:22:21.432436Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:21.432457Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 
2026-02-19T08:22:21.432494Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:21.432544Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-02-19T08:22:21.432570Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:21.432688Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:22:21.432725Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:22:21.432753Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:21.432785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2026-02-19T08:22:21.432818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:21.432855Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-02-19T08:22:21.432881Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:21.432990Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:21.433010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2026-02-19T08:22:21.433041Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:21.433096Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:22:21.433127Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:21.433235Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:21.433259Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-02-19T08:22:21.433312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:21.433355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:22:21.433382Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:21.433488Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437186 2026-02-19T08:22:21.433510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-02-19T08:22:21.433545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:22:21.433579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:22:21.433603Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:22:21.433880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-02-19T08:22:21.433927Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.433963Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-02-19T08:22:21.434048Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-02-19T08:22:21.434100Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.434126Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-02-19T08:22:21.434216Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-02-19T08:22:21.434251Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.434280Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-02-19T08:22:21.434367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-02-19T08:22:21.434403Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.434426Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-02-19T08:22:21.434522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:22:21.434547Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.434569Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-02-19T08:22:21.434675Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:22:21.434720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.434747Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-02-19T08:22:21.434816Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:22:21.434847Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:21.434868Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] Test command err: 2026-02-19T08:21:48.747126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488646168765388:2240];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:48.747189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:49.313788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:49.330246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:49.330349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:49.505145Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488646168765176:2081] 1771489308682559 != 1771489308682562 2026-02-19T08:21:49.525435Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:49.539804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:49.649829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:49.837824Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:50.026078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-02-19T08:21:50.026101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:50.026109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:50.026190Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:51.180687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:51.190501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:21:53.747563Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488646168765388:2240];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:53.747642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:53.934975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488667643602558:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:53.935071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:53.935423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488667643602568:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:53.935466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.236520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.392997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488671938569957:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.393079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.393361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488671938569962:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.393415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488671938569963:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.393537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.402049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:54.419508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488671938569966:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:21:54.607577Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488671938570017:2599] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:54.966990Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488671938570085:2366], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:61: Error: At function: KiUpdateTable!
:1:61: Error: Cannot update primary key column: Key 2026-02-19T08:21:54.967411Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YWYwNTM3NjktNWZhMDg5NjQtN2I5MThlOGMtN2FlOWE1ZmE=, ActorId: [1:7608488667643602528:2326], ActorState: ExecuteState, LegacyTraceId: 01khtfvj3j5wfngjgj09g0hc3c, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 61 } message: "At function: KiUpdateTable!" end_position { row: 1 column: 61 } severity: 1 issues { position { row: 1 column: 61 } message: "Cannot update primary key column: Key" end_position { row: 1 column: 61 } severity: 1 } } } tx_id# trace_id# 2026-02-19T08:21:54.997669Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488671938570095:2370], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Cannot update primary key column: Key 2026-02-19T08:21:54.998013Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YWYwNTM3NjktNWZhMDg5NjQtN2I5MThlOGMtN2FlOWE1ZmE=, ActorId: [1:7608488667643602528:2326], ActorState: ExecuteState, LegacyTraceId: 01khtfvj514bwhbya5b2fsad4v, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 63 } message: "At function: KiUpdateTable!" end_position { row: 1 column: 63 } severity: 1 issues { position { row: 1 column: 63 } message: "Cannot update primary key column: Key" end_position { row: 1 column: 63 } severity: 1 } } } tx_id# trace_id# 2026-02-19T08:21:56.185022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:56.197477Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488681092519410:2278];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:56.197553Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:56.349811Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:56.365930Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488681092519147:2081] 1771489316075536 != 1771489316075539 2026-02-19T08:21:56.421373Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:56.425698Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:56.426094Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:56.459597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:56.645868Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:56.729140Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:56.729162Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:56.729170Z node 2 : ... LOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.063172Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608488815157169278:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.063410Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608488815157169280:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.063485Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.066447Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:28.075111Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7608488815157169281:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:22:28.173060Z node 6 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [6:7608488815157169333:2590] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:28.394960Z node 6 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [6:7608488815157169393:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:22:28.395601Z node 6 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=6&id=ZjU3OTY1NWQtMmM2MjExZjMtODNlMGM1N2EtNjkyMjk4Mw==, ActorId: [6:7608488810862201871:2327], ActorState: ExecuteState, LegacyTraceId: 01khtfwjrx57k7ac9fz0dw7e6z, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 63 } message: "At function: KiWriteTable!" end_position { row: 1 column: 63 } severity: 1 issues { position { row: 1 column: 45 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value\':Null> to Struct<\'Key\':Uint64?,\'Value\':String>" end_position { row: 1 column: 45 } severity: 1 issues { position { row: 1 column: 45 } message: "Failed to convert \'Value\': Null to String" end_position { row: 1 column: 45 } severity: 1 } } issues { position { row: 1 column: 45 } message: "Failed to convert input columns types to scheme types" end_position { row: 1 column: 45 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:29.279060Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488821621027799:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:29.279139Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:29.290448Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:29.388857Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:29.390629Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608488821621027761:2081] 1771489349278048 != 1771489349278051 2026-02-19T08:22:29.404261Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:29.404362Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:29.423298Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:29.461702Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:29.470577Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:29.470601Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:29.470609Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:29.470684Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:29.978408Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:30.285653Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:32.681977Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608488834505930536:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.682062Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.682276Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608488834505930545:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.682319Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.702586Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:32.774834Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608488834505930638:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.774919Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.774987Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608488834505930643:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.775100Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608488834505930645:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.775142Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:32.778008Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:32.785845Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608488834505930647:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:22:32.883895Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608488834505930698:2590] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:33.198824Z node 7 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [7:7608488838800898064:2327], TxId: 281474976715663, task: 1. Ctx: { CheckpointId : . TraceId : 01khtfwqay9t1bpqapyqevsshc. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZjM1YTQ0MGMtYWRkNmU5ZmQtNTM1MGZiNmYtZjY0OTRlYjk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 }. 2026-02-19T08:22:33.199581Z node 7 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=7&id=ZjM1YTQ0MGMtYWRkNmU5ZmQtNTM1MGZiNmYtZjY0OTRlYjk=, ActorId: [7:7608488834505930530:2327], ActorState: ExecuteState, LegacyTraceId: 01khtfwqay9t1bpqapyqevsshc, Create QueryResponse for error on request, msg: status# BAD_REQUEST issues# { message: "Tried to insert NULL value into NOT NULL column: Value" issue_code: 2031 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:21:15.187471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:15.344502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:15.366870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:15.366956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:15.367532Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:15.695405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:15.695539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:15.763462Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489272572559 != 1771489272572563 2026-02-19T08:21:15.774256Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:15.822404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:15.919072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:16.241384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:16.255357Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:16.361221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:16.394407Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:21:16.395331Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:21:16.395604Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:21:16.395852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:21:16.405958Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:21:16.439915Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:21:16.440132Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:21:16.442205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:21:16.442344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:21:16.442410Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:21:16.442845Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:21:16.443004Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:21:16.443097Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:21:16.453901Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:21:16.490378Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:21:16.490640Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:21:16.490779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:21:16.490921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:21:16.490963Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:21:16.491015Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:21:16.491283Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:16.491329Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:16.491763Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:21:16.491873Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:21:16.491951Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:21:16.491991Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:21:16.492028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:21:16.492125Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:21:16.492164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:21:16.492196Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:21:16.492254Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:21:16.492369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:16.492409Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:16.492453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:21:16.492547Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:21:16.492592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:21:16.492713Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:21:16.493015Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:21:16.493069Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:21:16.493178Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:21:16.493224Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:21:16.493270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:21:16.493324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:21:16.493367Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:21:16.493690Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:21:16.493761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:21:16.493803Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:21:16.493847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:21:16.493916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:21:16.493948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:21:16.493983Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:21:16.494019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:21:16.494045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:21:16.495983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:21:16.496037Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:21:16.510148Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:22:23.995232Z node 11 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-02-19T08:22:23.995324Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [11:926:2789], Recipient [11:1158:2963]: {TEvReadSet step# 3001 txid# 1234567890012 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-02-19T08:22:23.995348Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:22:23.995371Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890012 ... validating table 2026-02-19T08:22:24.137371Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [11:1210:3002], Recipient [11:1158:2963]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:24.137458Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:24.137512Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [11:1209:3001], serverId# [11:1210:3002], sessionId# [0:0:0] 2026-02-19T08:22:24.152596Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [11:1215:3005], Recipient [11:1158:2963]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-02-19T08:22:24.152793Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:22:24.152909Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-02-19T08:22:24.153044Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:22:24.153112Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:22:24.153170Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:24.153223Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:22:24.153294Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-02-19T08:22:24.153354Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:22:24.153386Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 
executing on unit BuildAndWaitDependencies 2026-02-19T08:22:24.153411Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:22:24.153436Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:22:24.153583Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-02-19T08:22:24.153950Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2026-02-19T08:22:24.154026Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[11:1215:3005], 0} after executionsCount# 1 2026-02-19T08:22:24.154112Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[11:1215:3005], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:22:24.154220Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[11:1215:3005], 0} finished in read 2026-02-19T08:22:24.154309Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:22:24.154341Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:22:24.154369Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:22:24.154398Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:22:24.154451Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:22:24.154474Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:22:24.154511Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-02-19T08:22:24.154561Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:22:24.154710Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:22:24.155623Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [11:1215:3005], Recipient [11:1158:2963]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:22:24.155699Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-02-19T08:22:24.155866Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [11:1215:3005], Recipient [11:926:2789]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 
TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-02-19T08:22:24.155986Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-02-19T08:22:24.156048Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-02-19T08:22:24.156113Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:22:24.156141Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-02-19T08:22:24.156166Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:24.156193Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-02-19T08:22:24.156238Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-02-19T08:22:24.156275Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:22:24.156299Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:24.156321Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-02-19T08:22:24.156345Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-02-19T08:22:24.156431Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-02-19T08:22:24.156639Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3001/18446744073709551615 2026-02-19T08:22:24.156681Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037889 Complete read# {[11:1215:3005], 1} after executionsCount# 1 2026-02-19T08:22:24.156720Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037889 read iterator# {[11:1215:3005], 1} sends rowCount# 3, bytes# 96, quota rows left# 996, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:22:24.156779Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037889 read iterator# {[11:1215:3005], 1} finished in read 2026-02-19T08:22:24.156827Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:22:24.156855Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-02-19T08:22:24.156879Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to 
execution unit CompletedOperations 2026-02-19T08:22:24.156905Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-02-19T08:22:24.156947Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:22:24.156969Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-02-19T08:22:24.156992Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-02-19T08:22:24.157021Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-02-19T08:22:24.157098Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-02-19T08:22:24.158277Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [11:1215:3005], Recipient [11:926:2789]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-02-19T08:22:24.158329Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1004 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1003 } }, { items { int32_value: 13 } items { int32_value: 1004 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::SsaProgramInJsonPlan [GOOD] Test command err: 2026-02-19T08:21:19.153142Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488520760131656:2168];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:19.153212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:19.421780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:19.453895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:19.454001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:19.520144Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488520760131523:2081] 1771489279124836 != 1771489279124839 2026-02-19T08:21:19.537412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:19.538254Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:19.604031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:19.728694Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:19.728727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:19.728737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:19.728829Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:20.123679Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:20.195025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:20.283905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:20.477919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:20.671994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:20.754559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:22.905458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488533645035289:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:22.905570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:22.905943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488533645035299:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:22.905987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.270852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.315132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.391815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.445415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.515560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.602910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.663039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.717691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:23.813246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488537940003467:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.813343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.813536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488537940003472:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.813583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488537940003473:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.813625Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:23.818053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:23.837609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488537940003476:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:23.898172Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488537940003529:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:24.150457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488520760131656:2168];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:24.150568Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key ... Tx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.010896Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.012380Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037969;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.012447Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037969;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.012465Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037969;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.022175Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037971;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.022251Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037971;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.022267Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037971;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.026743Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037973;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.026833Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037973;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.026855Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037973;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.033686Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.033900Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.033924Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.037743Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.037793Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.037811Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.045541Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.045623Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.045665Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.049017Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.049115Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.049133Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.056764Z node 5 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.056865Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.056885Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.068438Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.068439Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.068496Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.068506Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.068514Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.068523Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.080137Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.080238Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.080263Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.081027Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037953;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.081074Z node 5 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037953;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-02-19T08:22:07.081088Z node 5 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037953;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=54;result=not_found; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["OlapTable"],"PlanNodeId":1,"Operators":[{"E-Size":"0","Name":"Filter","Predicate":"Value \u003E 0","Blocks":"True","Inputs":[{"InternalOperatorId":1}],"E-Rows":"0","Pushdown":"True","E-Cost":"0"},{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/OlapTable","E-Rows":"0","Table":"OlapTable","ReadColumns":["Key (-∞, +∞)","Value"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"0"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"OlapTable","ReadColumns":["Key (-∞, +∞)","Value"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Size":"0","Name":"Filter","Predicate":"Value \u003E 0","Blocks":"True","E-Rows":"0","Pushdown":"True","E-Cost":"0"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2026-02-19T08:22:05.623951Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488717666759729:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:05.624062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:06.157823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:06.215461Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:06.215562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:06.254413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:06.447472Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:06.503957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:06.626026Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:06.834598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:06.834615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:06.834621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:06.834687Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:07.243539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:07.371235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:07.578444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:09.926963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:10.108825Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037890:1][1:7608488739141597896:2358] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:42:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-02-19T08:22:10.349485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:10.544239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-02-19T08:22:10.626366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488717666759729:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:10.626526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:10.666416Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2026-02-19T08:22:10.666449Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-02-19T08:22:10.666462Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-02-19T08:22:10.666474Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2026-02-19T08:22:10.666494Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-02-19T08:22:10.666509Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2026-02-19T08:22:10.666521Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-02-19T08:22:10.666532Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-02-19T08:22:10.666544Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-02-19T08:22:10.666554Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-02-19T08:22:10.666565Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-02-19T08:22:10.666576Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2026-02-19T08:22:10.666587Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-02-19T08:22:10.666598Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-02-19T08:22:10.666608Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 
1, TabletId: 72075186224037898 not found 2026-02-19T08:22:10.666622Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-02-19T08:22:10.677540Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found 2026-02-19T08:22:10.677601Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2026-02-19T08:22:10.677624Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,13) wasn't found 2026-02-19T08:22:10.677651Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2026-02-19T08:22:10.677675Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,16) wasn't found 2026-02-19T08:22:10.677699Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,21) wasn't found 2026-02-19T08:22:10.677772Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,18) wasn't found 2026-02-19T08:22:12.874350Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488748231360167:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:12.874401Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:12.992808Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:13.125129Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:13.145995Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:13.146075Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:13.174822Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:13.196794Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:13.384467Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:13.384487Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:13.384492Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:13.384560Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:13.637609Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:13.724062Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:13.902053Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:13.936234Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:14.278993Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:14.421691Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:14.497974Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:18.222756Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488774170944543:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:18.222802Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:18.387575Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:18.508456Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:18.529469Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:18.529572Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:18.549197Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-02-19T08:22:18.610815Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011299s 2026-02-19T08:22:18.621083Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007020s 2026-02-19T08:22:18.649018Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:18.675153Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:18.675173Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:18.675180Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:18.675259Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:18.891318Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:18.963047Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:19.135612Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:19.378034Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:19.378651Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608488778465913883:3486] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:22.791287Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608488791099152582:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:22.791377Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:22.806920Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:22.912140Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 
2026-02-19T08:22:22.932806Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:22.932910Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:22.964187Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:23.034261Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:23.034298Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:23.034307Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:23.034386Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:23.092590Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:23.261945Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:23.310267Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:23.500394Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |92.9%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |92.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |92.9%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test 
command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:22:15.155825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:15.247786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:22:15.268684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:22:15.268785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:22:15.269259Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:22:15.563159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:15.563304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:15.638926Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489331375535 != 1771489331375539 2026-02-19T08:22:15.646967Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:15.695074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:15.799375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:22:16.255386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:16.273404Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:16.384777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:16.422557Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:22:16.423395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:22:16.423632Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:22:16.423890Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:22:16.434413Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:22:16.467509Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:22:16.467647Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:22:16.469081Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:22:16.469162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:22:16.469213Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:22:16.469574Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:22:16.469688Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:22:16.469812Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:22:16.470198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:22:16.503823Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:22:16.504052Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:22:16.504201Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:22:16.504257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:22:16.504296Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:22:16.504349Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:22:16.504616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:16.504664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:16.504930Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:22:16.505018Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:22:16.505091Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:22:16.505132Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:22:16.505186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:22:16.505226Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:22:16.505276Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:22:16.505307Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:22:16.505374Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:22:16.505457Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:16.505505Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:16.505546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:22:16.505631Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:22:16.505673Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:22:16.505805Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:22:16.506085Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:22:16.506146Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:22:16.506260Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:22:16.506323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:22:16.506368Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:22:16.506417Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:22:16.506456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:22:16.506759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:22:16.506793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:22:16.506827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:22:16.506870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:22:16.506932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:22:16.506964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:22:16.507004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:22:16.507035Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:22:16.507065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:22:16.508050Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:22:16.508091Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit 
StoreSchemeTx 2026-02-19T08:22:16.508116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:22:16.508142Z node 1 :TX_DATASHARD T ... 8:22:17.833501Z node 1 :TX_DATASHARD TRACE: datashard_write_operation.cpp:260: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2026-02-19T08:22:17.833599Z node 1 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:38:1] 2026-02-19T08:22:17.833697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-02-19T08:22:17.833767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:22:17.833819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-02-19T08:22:17.833857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:17.833901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:22:17.833961Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-02-19T08:22:17.834016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-02-19T08:22:17.834049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:22:17.834071Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:17.834092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-02-19T08:22:17.834110Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-02-19T08:22:17.834131Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:22:17.834151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-02-19T08:22:17.834171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-02-19T08:22:17.834236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-02-19T08:22:17.834277Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:6] at 72075186224037888 2026-02-19T08:22:17.834340Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-02-19T08:22:17.834469Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write 
operation for [0:6] at 72075186224037888, row count=1 2026-02-19T08:22:17.834520Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:110: add locks to result: 0 2026-02-19T08:22:17.834583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:22:17.834608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-02-19T08:22:17.834643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-02-19T08:22:17.834673Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:22:17.834706Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-02-19T08:22:17.834732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-02-19T08:22:17.834776Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:22:17.834815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:22:17.834860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-02-19T08:22:17.834879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:22:17.834914Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished ... 
blocked commit for tablet 72075186224037888 2026-02-19T08:22:17.981262Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [1:1174:2962], Recipient [1:891:2770]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-02-19T08:22:17.981470Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:22:17.981536Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-02-19T08:22:17.981582Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v3500/18446744073709551615 2026-02-19T08:22:17.981649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-02-19T08:22:17.981763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:22:17.981829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:22:17.981876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:17.981925Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:22:17.981986Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-02-19T08:22:17.982025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:22:17.982071Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:17.982096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:22:17.982122Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:22:17.982235Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-02-19T08:22:17.982545Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayComplete 2026-02-19T08:22:17.982582Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:22:17.982620Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:22:17.982652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 
72075186224037888 on unit CompletedOperations 2026-02-19T08:22:17.982721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:22:17.982761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:22:17.982790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-02-19T08:22:17.982833Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:22:18.068736Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [1:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 4000 2026-02-19T08:22:18.068850Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [1:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2026-02-19T08:22:18.207500Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:22:18.207590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:22:18.213855Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED 2026-02-19T08:22:18.214054Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:22:18.214213Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:22:18.214264Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:22:18.214324Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[1:1174:2962], 0} after executionsCount# 1 2026-02-19T08:22:18.214415Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[1:1174:2962], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:22:18.214557Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[1:1174:2962], 0} finished in read 2026-02-19T08:22:18.217883Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [1:1174:2962], Recipient [1:891:2770]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:22:18.217999Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 
2026-02-19T08:22:05.352323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488718857081857:2163];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:05.352375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:06.065831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:06.134684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:06.134775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:06.270697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:06.376499Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:06.393841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:06.394382Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:06.570319Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.010881s 2026-02-19T08:22:06.598236Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.017904s 2026-02-19T08:22:06.597806Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007936s 2026-02-19T08:22:06.929943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:06.929969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:06.929976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:06.930062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:07.382339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:07.510740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:07.815636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:08.217133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:08.480416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:12.289767Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488748449845230:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:12.290271Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:12.365956Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.013288s 2026-02-19T08:22:12.428329Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:12.606138Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:12.606213Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:12.621621Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:12.642922Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:12.717369Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:12.784073Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:12.784226Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:12.784233Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:12.784309Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:13.036076Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:13.167333Z node 
4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:13.370298Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:13.417984Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2026-02-19T08:22:14.645556Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:15.919443Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:15.978676Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:16.131597Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:16.381916Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:18.385936Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488772658553461:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:18.387086Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:18.452176Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:18.548188Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:18.565940Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:18.566026Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:18.591986Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:18.696203Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:18.696225Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:18.696232Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:18.696316Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:18.697360Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:18.941877Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:19.010089Z node 7 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:19.199892Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:19.387813Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |92.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:21:15.690411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:15.792918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:15.809511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:15.809576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:15.810010Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:16.090206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:16.090309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:16.152988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489272747332 != 1771489272747336 2026-02-19T08:21:16.162829Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:16.210082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:16.307526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:16.604673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:16.618534Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:16.724803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:16.759624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:21:16.760724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:21:16.761084Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:21:16.761345Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:21:16.771869Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:21:16.811080Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:21:16.811229Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:21:16.813243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:21:16.813376Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:21:16.813455Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:21:16.813897Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:21:16.814060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:21:16.814147Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:21:16.824928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:21:16.870211Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:21:16.870471Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:21:16.870622Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:21:16.870676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:21:16.870723Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:21:16.870792Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:21:16.871064Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:16.871136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:16.871505Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:21:16.871628Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:21:16.871729Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:21:16.871768Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:21:16.871806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:21:16.871848Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:21:16.871886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:21:16.871919Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:21:16.871982Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:21:16.872099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:16.872139Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:16.872189Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:21:16.872299Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:21:16.872381Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:21:16.872497Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:21:16.872786Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:21:16.872846Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:21:16.872944Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:21:16.872997Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:21:16.873043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:21:16.873096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:21:16.873136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:21:16.873468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:21:16.873514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:21:16.873558Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:21:16.873598Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:21:16.873661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:21:16.873692Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:21:16.873757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:21:16.873795Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:21:16.873824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:21:16.875690Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:21:16.875771Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:21:16.887721Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.862164Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037890, clientId# [11:1174:2981], serverId# [11:1175:2982], sessionId# [0:0:0] 2026-02-19T08:22:20.862263Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553169, Sender [11:1173:2980], Recipient [11:948:2802]: NKikimrTxDataShard.TEvGetInfoRequest 2026-02-19T08:22:20.862843Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [11:1178:2985], Recipient [11:948:2802]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.862873Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.862897Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037890, clientId# [11:1177:2984], serverId# [11:1178:2985], sessionId# [0:0:0] 2026-02-19T08:22:20.863017Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [11:1176:2983], Recipient [11:948:2802]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-02-19T08:22:20.863118Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2026-02-19T08:22:20.863148Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:22:20.863172Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037890 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-02-19T08:22:20.863206Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2026-02-19T08:22:20.863258Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-02-19T08:22:20.863276Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2026-02-19T08:22:20.863296Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:20.863314Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2026-02-19T08:22:20.863374Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037890 2026-02-19T08:22:20.863403Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-02-19T08:22:20.863421Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:20.863438Z node 11 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2026-02-19T08:22:20.863456Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2026-02-19T08:22:20.863521Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-02-19T08:22:20.863639Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037890 Complete read# {[11:1176:2983], 1002} after executionsCount# 1 2026-02-19T08:22:20.863668Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037890 read iterator# {[11:1176:2983], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:22:20.863718Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037890 read iterator# {[11:1176:2983], 1002} finished in read 2026-02-19T08:22:20.863756Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-02-19T08:22:20.863773Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2026-02-19T08:22:20.863808Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2026-02-19T08:22:20.863825Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2026-02-19T08:22:20.863856Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-02-19T08:22:20.863871Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2026-02-19T08:22:20.863888Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037890 has finished 2026-02-19T08:22:20.863906Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-02-19T08:22:20.863961Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-02-19T08:22:20.864413Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [11:1181:2988], Recipient [11:943:2797]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.864440Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.864464Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037891, clientId# [11:1180:2987], serverId# [11:1181:2988], sessionId# [0:0:0] 2026-02-19T08:22:20.864544Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553169, Sender [11:1179:2986], Recipient [11:943:2797]: NKikimrTxDataShard.TEvGetInfoRequest 2026-02-19T08:22:20.865038Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [11:1184:2991], Recipient [11:943:2797]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.865066Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:20.865090Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037891, clientId# [11:1183:2990], serverId# [11:1184:2991], sessionId# [0:0:0] 2026-02-19T08:22:20.865217Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [11:1182:2989], Recipient [11:943:2797]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-02-19T08:22:20.865302Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2026-02-19T08:22:20.865336Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037891 CompleteEdge# v2000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:22:20.865362Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037891 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-02-19T08:22:20.865392Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2026-02-19T08:22:20.865435Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-02-19T08:22:20.865473Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2026-02-19T08:22:20.865493Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:20.865509Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit BuildAndWaitDependencies 2026-02-19T08:22:20.865547Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037891 2026-02-19T08:22:20.865573Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-02-19T08:22:20.865591Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:20.865604Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2026-02-19T08:22:20.865622Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2026-02-19T08:22:20.865676Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-02-19T08:22:20.865802Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037891 Complete read# {[11:1182:2989], 1003} after executionsCount# 1 2026-02-19T08:22:20.865850Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037891 read iterator# {[11:1182:2989], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 
18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:22:20.865896Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037891 read iterator# {[11:1182:2989], 1003} finished in read 2026-02-19T08:22:20.865930Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-02-19T08:22:20.865947Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2026-02-19T08:22:20.865963Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2026-02-19T08:22:20.865979Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2026-02-19T08:22:20.866019Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-02-19T08:22:20.866043Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2026-02-19T08:22:20.866070Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037891 has finished 2026-02-19T08:22:20.866101Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2026-02-19T08:22:20.866174Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::ImmediateWriteVolatileTxIdOnPageFault [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:21:15.944887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:16.047028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:21:16.061220Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:21:16.061281Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:21:16.061769Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:21:16.351359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:16.351503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:16.446788Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489273028120 != 1771489273028124 2026-02-19T08:21:16.461028Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:16.507488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:16.598461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:21:16.896080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:16.910320Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:17.020087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.072788Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:21:17.074296Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:21:17.074652Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:21:17.074930Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:21:17.085834Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:21:17.133413Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:21:17.133552Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:21:17.135021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:21:17.135143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:21:17.135197Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:21:17.135496Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:21:17.135639Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:21:17.135713Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:21:17.146662Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:21:17.197019Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:21:17.197267Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:21:17.197386Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:21:17.197457Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:21:17.197503Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:21:17.197543Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:21:17.197807Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:17.197863Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:21:17.198283Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:21:17.198408Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:21:17.198515Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:21:17.198630Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:21:17.198674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:21:17.198712Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:21:17.198747Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:21:17.198780Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:21:17.198833Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:21:17.198938Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:17.198979Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:21:17.199034Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:21:17.199140Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:21:17.199184Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:21:17.199317Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:21:17.199596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:21:17.199653Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:21:17.199760Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:21:17.199806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:21:17.199850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:21:17.199891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:21:17.199978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:21:17.200342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:21:17.200390Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:21:17.200449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:21:17.200502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:21:17.200571Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:21:17.200603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:21:17.200636Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:21:17.200679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:21:17.200705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:21:17.202749Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:21:17.202809Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:21:17.214162Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... ProposeTransaction 2026-02-19T08:22:33.071067Z node 12 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-02-19T08:22:33.071226Z node 12 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-02-19T08:22:33.071313Z node 12 :TX_DATASHARD TRACE: datashard_write_operation.cpp:260: Table /Root/table, shard: 72075186224037888, write point (Int32 : 2) 2026-02-19T08:22:33.071367Z node 12 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Int32 : 2) table: [72057594046644480:38:1] 2026-02-19T08:22:33.071461Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037888 on unit CheckWrite 2026-02-19T08:22:33.071518Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037888 is Executed 2026-02-19T08:22:33.071568Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037888 executing on unit CheckWrite 2026-02-19T08:22:33.071613Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:22:33.071649Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:22:33.071686Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:22:33.071751Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037888 2026-02-19T08:22:33.071787Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037888 is Executed 2026-02-19T08:22:33.071811Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:22:33.071855Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037888 to execution unit BlockFailPoint 2026-02-19T08:22:33.071875Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037888 on unit BlockFailPoint 2026-02-19T08:22:33.071895Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037888 is Executed 2026-02-19T08:22:33.071918Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037888 executing on unit BlockFailPoint 2026-02-19T08:22:33.071940Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037888 to execution unit ExecuteWrite 2026-02-19T08:22:33.071958Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037888 on unit 
ExecuteWrite 2026-02-19T08:22:33.071988Z node 12 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:1] at 72075186224037888 2026-02-19T08:22:33.072043Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:22:33.072338Z node 12 :TX_DATASHARD TRACE: execute_write_unit.cpp:185: Tablet 72075186224037888 is not ready for [0:1] execution 2026-02-19T08:22:33.072404Z node 12 :TX_DATASHARD DEBUG: datashard_write_operation.cpp:511: tx 1 at 72075186224037888 released its data 2026-02-19T08:22:33.072450Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037888 is Restart 2026-02-19T08:22:33.072637Z node 12 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-02-19T08:22:33.072683Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037888 on unit ExecuteWrite 2026-02-19T08:22:33.072726Z node 12 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:1] at 72075186224037888 2026-02-19T08:22:33.072862Z node 12 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-02-19T08:22:33.072941Z node 12 :TX_DATASHARD TRACE: datashard_write_operation.cpp:260: Table /Root/table, shard: 72075186224037888, write point (Int32 : 2) 2026-02-19T08:22:33.072993Z node 12 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Int32 : 2) table: [72057594046644480:38:1] 2026-02-19T08:22:33.073066Z node 12 :TX_DATASHARD DEBUG: datashard_write_operation.cpp:604: tx 1 at 72075186224037888 restored its data 2026-02-19T08:22:33.073133Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:22:33.073463Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037888 is Continue 2026-02-19T08:22:33.073527Z node 12 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:22:33.073671Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269681160, Sender [12:69:2116], Recipient [12:1177:2974]: NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:22:33.073721Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3240: StateWork, processing event TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:22:33.073804Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [12:1177:2974], Recipient [12:1177:2974]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:33.073833Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:22:33.073900Z node 12 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:22:33.073947Z 
node 12 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:22:33.073999Z node 12 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037888 for ExecuteWrite 2026-02-19T08:22:33.074040Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteWrite 2026-02-19T08:22:33.074083Z node 12 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:281474976715664] at 72075186224037888 2026-02-19T08:22:33.074126Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:22:33.074280Z node 12 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:281474976715664] at 72075186224037888, row count=1 2026-02-19T08:22:33.074463Z node 12 :TX_DATASHARD TRACE: execute_write_unit.cpp:110: add locks to result: 0 2026-02-19T08:22:33.074531Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:22:33.074577Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteWrite 2026-02-19T08:22:33.074616Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit FinishProposeWrite 2026-02-19T08:22:33.074650Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:22:33.074721Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:22:33.074772Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishProposeWrite 2026-02-19T08:22:33.074826Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:22:33.074860Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:22:33.074896Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-02-19T08:22:33.074921Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:22:33.074960Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2026-02-19T08:22:33.075006Z node 12 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:22:33.075041Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-02-19T08:22:33.075086Z node 12 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached 
operations 2026-02-19T08:22:33.075117Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:22:33.075676Z node 12 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:22:33.075738Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:22:33.075794Z node 12 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-02-19T08:22:33.075884Z node 12 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:22:33.279814Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [12:1233:3014], Recipient [12:1177:2974]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:33.279953Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:22:33.280045Z node 12 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [12:1232:3013], serverId# [12:1233:3014], sessionId# [0:0:0] 2026-02-19T08:22:33.280285Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553224, Sender [12:835:2729], Recipient [12:1177:2974]: NKikimr::TEvDataShard::TEvGetOpenTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2026-02-19T08:22:06.146470Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488722314331298:2173];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:06.146535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:06.242144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:06.708735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:06.776677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:06.776816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:06.855834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:07.069203Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:07.098322Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:07.217588Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:07.250207Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.023352s 2026-02-19T08:22:07.274365Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.016301s 2026-02-19T08:22:07.249990Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.012016s 2026-02-19T08:22:07.469669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:07.469695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:07.469702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:07.469802Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:07.802923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:07.910986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:08.090784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:08.288783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:12.813919Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488750078803911:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:12.814736Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:12.857638Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:12.981072Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:13.007938Z node 4 :IMPORT 
WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:13.019335Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:13.019410Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:13.026593Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:13.290376Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:13.290395Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:13.290400Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:13.290484Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:13.296805Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:13.549751Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:13.672538Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:13.856640Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:13.865298Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:14.106183Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:22:14.189055Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } 
records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } records { sequence_number: "43" shard_id: "shard-000000" } records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: "shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" 
shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } records { sequence_number: "66" shard_id: "shard-000000" } records { sequence_number: "67" shard_id: "shard-000000" } records { sequence_number: "68" shard_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: "shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } 2026-02-19T08:22:17.786094Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608488750078803911:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:17.786177Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } 2026-02-19T08:22:21.872938Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488785982269596:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:21.873009Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:21.894507Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:21.979300Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:21.994951Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:21.995016Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:22.016213Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:22.117045Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-02-19T08:22:22.117071Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:22.117077Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:22.117160Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:22.157835Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:22.349285Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:22.406845Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:22.609477Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72)
|92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut
|92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console
|92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut
|92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console
|92.9%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console
|92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant
|92.9%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut
|92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant
|92.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant
>> TxUsage::WriteToTopic_Demo_40_Table
>> BasicUsage::CreateTopicWithCustomName
>> KqpScanArrowInChanels::AllTypesColumns [GOOD]
>> KqpScanArrowInChanels::SingleKey
>> TxUsage::WriteToTopic_Demo_11_Table
>> KqpSystemView::TopQueriesOrderByDesc
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
>> KqpSystemView::PartitionStatsParametricRanges
>> TxUsage::WriteToTopic_Demo_22_RestartNo_Table
>> KqpSystemView::PartitionStatsRange1
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
>> KqpSysColV1::StreamSelectRowAsterisk
>> TxUsage::WriteToTopic_Demo_16_Table
>> BasicUsage::ReadBinaryProducerIdFromLowLevelWrite
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log}
|92.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpScanArrowFormat::AggregateCountStar [GOOD]
>> KqpScanArrowFormat::AggregateByColumn
>> KqpScanSpilling::HandleErrorsCorrectly [GOOD]
>> KqpScanSpilling::SelfJoin
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log}
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
>> KqpKnn::BitVectorKnnPushdown
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
>> KqpKnn::Uint8VectorKnnPushdown
>> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD]
>> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink
|92.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log}
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest
|92.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log}
|93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ...
results_accumulator.log} >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_Stress >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> TStorageTenantTest::GenericCases |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_ColumnOrder >> TStorageTenantTest::CreateSolomonInsideSubDomain >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore >> TStorageTenantTest::CreateTableInsideSubDomain >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> TStorageTenantTest::Boot >> TStorageTenantTest::DeclareAndDefine >> TStorageTenantTest::CreateTableInsideSubDomain2 >> TStorageTenantTest::LsLs >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_3 [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList >> KqpSystemView::PartitionStatsRange1 [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> KqpPg::TableDeleteWhere-useSink [GOOD] >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: 2026-02-19T08:22:40.689146Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488868247370295:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.693318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:41.041514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.041606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.081383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.101198Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:41.103181Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488868247370157:2081] 1771489360677475 != 1771489360677478 2026-02-19T08:22:41.113225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.298436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:41.298485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:41.298493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:41.298596Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:41.335375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:41.697407Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:41.761862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.770620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:22:41.832810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:41.980563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:42.144736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:42.215797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.048306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885427241215:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.048406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.048820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885427241225:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.048871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.425175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.466321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.501044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.530494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.562440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.607126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.650609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.730018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.819211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885427242098:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.819299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.819483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885427242104:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.819526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885427242103:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.819559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.823404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:44.837142Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488885427242107:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:22:44.919033Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488885427242158:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:45.685853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488868247370295:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.685949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:46.832587Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489366825, txId: 281474976710673] shutting down >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: 2026-02-19T08:22:40.770926Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488869229321291:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.770988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:41.222218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.222345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.280260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.286903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.366986Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488869229321253:2081] 1771489360762897 != 1771489360762900 2026-02-19T08:22:41.371290Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:41.461957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:41.461980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:41.461987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:41.462074Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:41.504590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:41.809867Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:41.903307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.908741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:22:41.965019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:42.156096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:42.316544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:42.389499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.259781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488886409192310:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.259924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.260299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488886409192320:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.260334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.626387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.666328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.705092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.733349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.765773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.817221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.872482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.919189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:45.006080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488890704160487:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.006175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.006593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488890704160493:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.006635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488890704160492:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.006743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.010471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:45.021530Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488890704160496:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:22:45.084187Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488890704160547:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:45.773490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488869229321291:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.773569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:46.747748Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489366787, txId: 281474976710673] shutting down >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::AggregateByColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] Test command err: RandomSeed# 14051867382472562955 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge 
range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 
indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge 
range *** performing bridge range for every combination of indexOnly# {0,1}, reverse# {0,1}, mask1# 0..7, mask2# 0..7, mask3# 0..7, reading from i# 0, i# 1, i# 2 for each combination (repetitive per-combination output elided) ... *** reading from i# 0 ***
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: 2026-02-19T08:22:40.759806Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488868015560876:2252];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.759917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:41.051610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.051711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.089583Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.113377Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:41.120675Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488868015560661:2081] 1771489360742003 != 1771489360742006 2026-02-19T08:22:41.128690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.247344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:41.247368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:41.247379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:41.247467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:22:41.292433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:41.692824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.761827Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:41.769248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:41.945330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:42.102517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:42.177446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.050944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885195431725:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.051116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.051686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885195431735:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.051743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.379183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.418803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.459199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.498796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.530715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.566954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.621019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.694519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.783813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885195432607:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.783921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.784456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885195432612:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.784501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488885195432613:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.784720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.788934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:44.805780Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488885195432616:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:22:44.864217Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488885195432667:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:45.759219Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488868015560876:2252];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.759282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:47.140767Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489367128, txId: 281474976710673] shutting down >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query [GOOD] >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithManyPartitions >> BasicUsage::ReadBinaryProducerIdFromLowLevelWrite [GOOD] >> BasicUsage::CreateTopicWithStreamingConsumer >> KqpScanLogs::WideCombine+EnabledLogs [GOOD] >> KqpScanLogs::WideCombine-EnabledLogs |93.0%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> KqpScanSpilling::SelfJoin [GOOD] >> TConsoleTests::TestCreateTenant >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] |93.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |93.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |93.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table [GOOD] >> KqpSystemView::TopQueriesOrderByDesc [GOOD] >> TStorageTenantTest::GenericCases [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2026-02-19T08:22:06.059764Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488723358954611:2161];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:06.061506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:06.116927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:06.494770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:06.547096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:06.547231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:06.759103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:06.807112Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:06.815758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:06.988348Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.026877s 2026-02-19T08:22:07.017920Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.025841s 2026-02-19T08:22:07.078113Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:07.082595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:07.082614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:07.082626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:07.082710Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:07.365388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:07.458186Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:07.646574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:07.997070Z node 1 :PERSQUEUE ERROR: partition_read.cpp:831: [72075186224037888][Partition][0][StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2026-02-19T08:22:07.997171Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'stream_TestGetRecordsStreamWithSingleShard' partition: 0 messageNo: 0 requestId: error: trying to read from future. ReadOffset 100000, 0 EndOffset 30 2026-02-19T08:22:11.654823Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488743434073335:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:11.655401Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:11.726625Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:11.988148Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:12.149802Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:12.218750Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:12.218826Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:12.228138Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:12.381687Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:12.381736Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:12.381743Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:12.381825Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:12.455355Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:22:12.606892Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:12.676923Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:12.680531Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:12.890861Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:16.649871Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608488743434073335:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:16.649968Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:27.012712Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:22:27.012743Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:39.655243Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608488865694137724:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:39.655977Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:39.703931Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:39.816189Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:39.843821Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:39.844333Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:39.882370Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:39.967741Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:39.978891Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:39.978914Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:39.978923Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:39.979327Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:40.245304Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:40.320638Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:40.555614Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:40.709543Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:45.068682Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608488888075229496:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.068830Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:45.101703Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:45.253981Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:45.255131Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:45.279813Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:45.279930Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:45.313432Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:45.368534Z node 11 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007925s 2026-02-19T08:22:45.370834Z node 12 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007864s 
2026-02-19T08:22:45.432039Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:45.432077Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:45.432093Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:45.432203Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:45.701017Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:45.781515Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:45.812698Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:46.081160Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:22:46.097908Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> TStorageTenantTest::DeclareAndDefine [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query |93.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |93.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |93.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> 
TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TConsoleTests::TestCreateSharedTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/ynks/003709/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk8 2026-02-19T08:17:44.695359Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487595396826872:2194];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:44.695421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:45.125846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:45.165632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:45.165753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:45.267368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:45.307244Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:45.309525Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487595396826707:2081] 1771489064688809 != 1771489064688812 2026-02-19T08:17:45.350373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:45.586399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:45.586425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:45.586432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:45.586516Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:17:45.705288Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:46.257127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:46.267059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:17:46.340367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.552416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.834140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.925563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.320503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487616871665069:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:49.320651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:49.321025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487616871665079:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:49.321064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:49.697807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487595396826872:2194];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:49.697910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:49.760638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.822998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.867894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.912440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.963106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.010945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.058690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.156991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:50.346852Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621166633247:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.346956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.347503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621166633252:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.347553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621166633253:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.347667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.352190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:50.371342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487621166633256:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:17:50.435783Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487621166633307:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:00.053820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:18:00.053855Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:42" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '730) '('"_id" '"555b77ed-e390cac5-f75c6d7d-2f7c7c36") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7 '1 '"HashV2")) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '686) '('"_id" '"7d9ce570-7f34d4a6-6065d882-7aa61edc") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '698) '('"_id" '"73414a2c-994e79c0-f6ea4c39-f9732277")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2026-02-19T08:22:46.162003Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488893235484333:2093];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.162605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:46.483653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:46.517216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:46.517319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:46.526179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:46.630629Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:46.668605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:46.889791Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488893235484524:2144] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:46.889847Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488893235484979:2441] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:46.889992Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488893235484549:2158], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:46.890163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488893235484758:2292][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488893235484549:2158], cookie# 1 2026-02-19T08:22:46.891785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488893235484813:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488893235484810:2292], cookie# 1 2026-02-19T08:22:46.891819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488893235484814:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488893235484811:2292], cookie# 1 2026-02-19T08:22:46.891835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488893235484815:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488893235484812:2292], cookie# 1 2026-02-19T08:22:46.891866Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488888940516870:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488893235484813:2292], cookie# 1 2026-02-19T08:22:46.891937Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488888940516873:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488893235484814:2292], cookie# 1 2026-02-19T08:22:46.891959Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488888940516876:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488893235484815:2292], cookie# 1 2026-02-19T08:22:46.892008Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488893235484813:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888940516870:2051], cookie# 1 2026-02-19T08:22:46.892024Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488893235484814:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888940516873:2054], cookie# 1 2026-02-19T08:22:46.892050Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488893235484815:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888940516876:2057], cookie# 1 2026-02-19T08:22:46.892088Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488893235484758:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488893235484810:2292], cookie# 1 2026-02-19T08:22:46.892128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488893235484758:2292][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:46.892152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488893235484758:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488893235484811:2292], cookie# 1 2026-02-19T08:22:46.892183Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608488893235484758:2292][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:46.892211Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488893235484758:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488893235484812:2292], cookie# 1 2026-02-19T08:22:46.892224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488893235484758:2292][/dc-1] Sync cookie mismatch: sender# [1:7608488893235484812:2292], cookie# 1, current cookie# 0 2026-02-19T08:22:46.892276Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488893235484549:2158], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:22:46.903547Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488893235484549:2158], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488893235484758:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:46.903655Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488893235484549:2158], cacheItem# { Subscriber: { Subscriber: [1:7608488893235484758:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:46.915902Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488893235484980:2442], recipient# [1:7608488893235484979:2441], result# { ErrorCount: 0 
DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:46.916004Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608488893235484979:2441] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:46.961656Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608488893235484979:2441] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:22:46.977562Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608488893235484979:2441] Handle TEvDescribeSchemeResult Forward to# [1:7608488893235484978:2440] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:22:46.982759Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608488893235484524:2144] Handle TEvProposeTransaction 2026-02-19T08:22:46.982778Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608488893235484524:2144] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:22:46.982827Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608488893235484524:2144] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608488893235484991:2449] 2026-02-19T08:22:47.120139Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608488893235484991:2449] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { 
ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-02-19T08:22:47.120190Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# ... 80, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710660 2026-02-19T08:22:47.589325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710660 2026-02-19T08:22:47.589340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2026-02-19T08:22:47.589350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2026-02-19T08:22:47.589387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710660, subscribers: 1 2026-02-19T08:22:47.589401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7608488897530452735:2309] 2026-02-19T08:22:47.589530Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488888940516870:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7608488896613945381:2112] 2026-02-19T08:22:47.589651Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488893235484549:2158], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1771489367186 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 
10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [1:7608488893235484758:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1771489367186 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [1:7608488893235484758:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1771489367186 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2026-02-19T08:22:47.591075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:22:47.591126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:22:47.591139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:22:47.591147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:22:47.591157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:22:47.593970Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488888940516873:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7608488896613945382:2112] 2026-02-19T08:22:47.594004Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488888940516876:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7608488896613945383:2112] 2026-02-19T08:22:47.594987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2026-02-19T08:22:47.595022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2026-02-19T08:22:47.595158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:22:47.595533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2026-02-19T08:22:47.595748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 
72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:22:47.595964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2026-02-19T08:22:47.596091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-02-19T08:22:47.596209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2026-02-19T08:22:47.596327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:22:47.596480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2026-02-19T08:22:47.596625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:22:47.596659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-02-19T08:22:47.596780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2026-02-19T08:22:47.596909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:22:47.596926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-02-19T08:22:47.596963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:22:47.600329Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2026-02-19T08:22:47.600481Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 2026-02-19T08:22:47.600520Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2026-02-19T08:22:47.600587Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2026-02-19T08:22:47.612195Z node 1 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2026-02-19T08:22:47.617156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:2 2026-02-19T08:22:47.617175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:22:47.617209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:22:47.617217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:22:47.617233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:1 2026-02-19T08:22:47.617240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-02-19T08:22:47.617254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:22:47.617276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:22:47.617300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:22:47.617324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:22:47.650732Z node 1 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2026-02-19T08:22:46.055024Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488894683366974:2161];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.055069Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:46.314519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:46.340259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:46.340360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:46.350239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:46.443433Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:46.495421Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:46.654732Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488894683367099:2144] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:46.654801Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488894683367554:2443] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:46.654927Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488894683367124:2158], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:46.655086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488894683367337:2299][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488894683367124:2158], cookie# 1 2026-02-19T08:22:46.656660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488894683367390:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894683367387:2299], cookie# 1 2026-02-19T08:22:46.656700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488894683367391:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894683367388:2299], cookie# 1 2026-02-19T08:22:46.656776Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488890388399444:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894683367390:2299], cookie# 1 2026-02-19T08:22:46.656791Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488894683367392:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894683367389:2299], cookie# 1 2026-02-19T08:22:46.656845Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488890388399447:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894683367391:2299], cookie# 1 2026-02-19T08:22:46.656861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488894683367390:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488890388399444:2051], cookie# 1 2026-02-19T08:22:46.656867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488890388399450:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894683367392:2299], cookie# 1 2026-02-19T08:22:46.656897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488894683367391:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488890388399447:2054], cookie# 1 2026-02-19T08:22:46.656935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488894683367392:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488890388399450:2057], cookie# 1 2026-02-19T08:22:46.656966Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488894683367337:2299][/dc-1] 
Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894683367387:2299], cookie# 1 2026-02-19T08:22:46.656995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488894683367337:2299][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:46.657035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488894683367337:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894683367388:2299], cookie# 1 2026-02-19T08:22:46.657060Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608488894683367337:2299][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:46.657097Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488894683367337:2299][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894683367389:2299], cookie# 1 2026-02-19T08:22:46.657177Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488894683367124:2158], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:22:46.657209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488894683367337:2299][/dc-1] Sync cookie mismatch: sender# [1:7608488894683367389:2299], cookie# 1, current cookie# 0 2026-02-19T08:22:46.683541Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488894683367124:2158], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488894683367337:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:46.683679Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488894683367124:2158], cacheItem# { Subscriber: { Subscriber: [1:7608488894683367337:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:46.686498Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488894683367556:2445], recipient# [1:7608488894683367554:2443], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 
0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:46.686568Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608488894683367554:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:46.754710Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608488894683367554:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:22:46.758341Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608488894683367554:2443] Handle TEvDescribeSchemeResult Forward to# [1:7608488894683367553:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:22:46.766713Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608488894683367099:2144] Handle TEvProposeTransaction 2026-02-19T08:22:46.766747Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608488894683367099:2144] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:22:46.766818Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608488894683367099:2144] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7608488894683367561:2449] 2026-02-19T08:22:46.874217Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608488894683367561:2449] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-02-19T08:22:46.874270Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# ... 
68897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:22:48.370516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2026-02-19T08:22:48.370759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-02-19T08:22:48.370928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2026-02-19T08:22:48.371047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:22:48.371172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2026-02-19T08:22:48.371332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-02-19T08:22:48.371463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2026-02-19T08:22:48.371577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-02-19T08:22:48.371677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2026-02-19T08:22:48.371750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:22:48.371858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2026-02-19T08:22:48.371950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2026-02-19T08:22:48.372026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2026-02-19T08:22:48.372086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-02-19T08:22:48.372150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2026-02-19T08:22:48.372247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:22:48.372263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2026-02-19T08:22:48.372291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2026-02-19T08:22:48.372377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:22:48.372387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-02-19T08:22:48.372477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2026-02-19T08:22:48.386296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:2 2026-02-19T08:22:48.386343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:22:48.386384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:7 2026-02-19T08:22:48.386391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-02-19T08:22:48.386408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:22:48.386414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:22:48.386428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:1 2026-02-19T08:22:48.386434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-02-19T08:22:48.386448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:6 2026-02-19T08:22:48.386458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-02-19T08:22:48.386474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:22:48.386479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:22:48.386503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:8 2026-02-19T08:22:48.386510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2026-02-19T08:22:48.386530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted 
shardIdx 72057594046644480:5 2026-02-19T08:22:48.386552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-02-19T08:22:48.386581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2026-02-19T08:22:48.386653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:22:48.386679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:22:48.386695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-02-19T08:22:48.386767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:22:48.389125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:22:48.670891Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:48.671300Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488896959822679:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:48.671408Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488901254790309:2307], recipient# [3:7608488901254790308:2310], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:49.675140Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488896959822679:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:49.675347Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488905549757608:2309], recipient# [3:7608488905549757607:2311], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.682307Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488896959822679:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.682530Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488909844724906:2310], recipient# [3:7608488909844724905:2312], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/ynks/00370a/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk5 2026-02-19T08:17:44.263073Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487596056861704:2177];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:44.271108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:44.340543Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:17:44.737228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:44.786498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:44.786590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:44.941942Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487596056861565:2081] 1771489064215871 != 1771489064215874 2026-02-19T08:17:44.960611Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:44.969097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:44.987866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:17:45.302021Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:45.378429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:45.378451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:45.378458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:45.378535Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:46.480929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:46.499069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:17:46.624210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.988855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:47.404726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:47.554841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.261819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487596056861704:2177];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:49.261897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:50.404932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621826667224:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.405149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.414351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487621826667236:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.414473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.966333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.002423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.045339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.088935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.132919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.176689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.227605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.301818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.376467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487626121635401:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.376585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.376986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487626121635406:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.377024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487626121635407:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.377127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.381175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:51.401677Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487626121635410:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:17:51.459143Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487626121635463:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, requ ... : /Root. }. CA StateFunc 271646927 2026-02-19T08:22:50.511931Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984627:2591], TxId: 281474976710684, task: 2. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.511968Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:7608488913383984627:2591], TxId: 281474976710684, task: 2. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:22:50.512177Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646923 2026-02-19T08:22:50.512195Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710684, task: 3. Finish input channelId: 3, from: [2:7608488913383984627:2591] 2026-02-19T08:22:50.512212Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.512244Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984627:2591], TxId: 281474976710684, task: 2. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646927 2026-02-19T08:22:50.512257Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984627:2591], TxId: 281474976710684, task: 2. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.512311Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710684, task: 2. 
Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [10] 2026-02-19T08:22:50.512323Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710684, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [11] 2026-02-19T08:22:50.512337Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710684, task: 2. Tasks execution finished 2026-02-19T08:22:50.512355Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [2:7608488913383984627:2591], TxId: 281474976710684, task: 2. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:22:50.512422Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710684, task: 2. pass away 2026-02-19T08:22:50.512507Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710684;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:22:50.512791Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.512960Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.513036Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.513447Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.513544Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.513597Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.513626Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:22:50.513951Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. 
Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.513968Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:22:50.514057Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.514087Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:22:50.514326Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:22:50.514358Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710684, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2026-02-19T08:22:50.514366Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710684, task: 3. Tasks execution finished 2026-02-19T08:22:50.514377Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [2:7608488913383984628:2592], TxId: 281474976710684, task: 3. Ctx: { TraceId : 01khtfx7cj9e1gj1gptykc329a. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MzI3MThkZDQtMTMzZDkwNTItMzM2MTk2ZGItZWMxM2QxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:22:50.514445Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710684, task: 3. pass away 2026-02-19T08:22:50.514513Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710684;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:22:50.515701Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489370301, txId: 281474976710683] shutting down |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: 2026-02-19T08:16:46.081538Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487348288171407:2190];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:16:46.081614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:16:46.334102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:16:46.347891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:16:46.348021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:16:46.444272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:16:46.449347Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:16:46.451655Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487348288171255:2081] 1771489006063535 != 1771489006063538 2026-02-19T08:16:46.566194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:16:46.621540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:16:46.621569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:16:46.621584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:16:46.621732Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:16:47.052187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:16:47.081506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:16:47.094206Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:16:49.098866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:45: ERROR: value too long for type character(2) 2026-02-19T08:16:49.290844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:45: ERROR: value too long for type character(2) 2026-02-19T08:16:49.363028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-02-19T08:16:49.513985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-02-19T08:16:49.657204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-02-19T08:16:49.801786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"abcd ","abcd "} 2026-02-19T08:16:49.892234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:45: ERROR: value too long for type character varying(2) 2026-02-19T08:16:49.997871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:45: ERROR: value too long for type character varying(2) 2026-02-19T08:16:50.056062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-02-19T08:16:50.180943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-02-19T08:16:50.273730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-02-19T08:16:50.353208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-02-19T08:16:50.445885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:45: ERROR: bit string length 4 does not match type bit(2) 2026-02-19T08:16:50.512539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:45: ERROR: bit string length 4 does not match type bit(2) 2026-02-19T08:16:50.581932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 1111 2026-02-19T08:16:50.677814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {1111,1111} 2026-02-19T08:16:50.770347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:45: ERROR: bit string length 4 does not match type bit(6) 2026-02-19T08:16:50.839710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:45: ERROR: bit string length 4 does not match t ... suboperation type: ESchemeOpCreateTable, opId: 281474976710844:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.263417Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:22:43.301137Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710846:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 869 2026-02-19T08:22:43.441006Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710847:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.531280Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:22:43.633076Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710850:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 650 2026-02-19T08:22:43.755368Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710851:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.872349Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710852:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.970201Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 829 2026-02-19T08:22:43.995133Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710854:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.115920Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710855:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 774 2026-02-19T08:22:44.226925Z node 11 :READ_TABLE_API WARN: 
rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:22:44.262891Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710857:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.358807Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:22:44.393034Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710859:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2950 2026-02-19T08:22:44.532967Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710860:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.638049Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:22:44.687173Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710862:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.781909Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 114 2026-02-19T08:22:44.815444Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710864:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.933165Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710865:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 3802 2026-02-19T08:22:45.056814Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710866:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:45.175526Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710867:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:45.266134Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send 
TEvPoisonPill 4072 2026-02-19T08:22:45.299633Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710869:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:45.424628Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710870:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:45.530746Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 142 2026-02-19T08:22:45.568729Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710872:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:45.678831Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:22:45.681067Z node 11 :TX_DATASHARD ERROR: finish_propose_unit.cpp:245: Prepare transaction failed. txid 281474976710874 at tablet 72075186224037962 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710874] at 72075186224037962 while waiting for scan finish) | 2026-02-19T08:22:45.682091Z node 11 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710874 at tablet 72075186224037962 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710874] at 72075186224037962 while waiting for scan finish) | 2026-02-19T08:22:45.718977Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710875:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 3615 2026-02-19T08:22:45.836685Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710876:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.115482Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710877:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 3614 2026-02-19T08:22:46.271780Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710878:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.359803Z node 11 :READ_TABLE_API WARN: 
rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:22:46.387735Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710880:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.505781Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 22 2026-02-19T08:22:46.574182Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710882:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.725101Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:22:46.777385Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710884:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.949820Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2026-02-19T08:22:45.178111Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488888901219718:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.178177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:45.224888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:45.512793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:45.553144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:45.553302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:45.563963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:45.648943Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:45.711708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:45.904049Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# 
[1:7608488888901219927:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:45.904104Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488888901220374:2433] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:45.904199Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488888901219941:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:45.904288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488888901220210:2312][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488888901219941:2146], cookie# 1 2026-02-19T08:22:45.905743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488888901220218:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488888901220215:2312], cookie# 1 2026-02-19T08:22:45.905795Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488888901220219:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488888901220216:2312], cookie# 1 2026-02-19T08:22:45.905815Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488888901220220:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488888901220217:2312], cookie# 1 2026-02-19T08:22:45.905847Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488888901219570:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488888901220218:2312], cookie# 1 2026-02-19T08:22:45.905877Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488888901219573:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488888901220219:2312], cookie# 1 2026-02-19T08:22:45.905915Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488888901219576:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488888901220220:2312], cookie# 1 2026-02-19T08:22:45.905967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488888901220218:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888901219570:2050], cookie# 1 2026-02-19T08:22:45.905993Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488888901220219:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888901219573:2053], cookie# 1 2026-02-19T08:22:45.906009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488888901220220:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888901219576:2056], cookie# 1 2026-02-19T08:22:45.906049Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488888901220210:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888901220215:2312], cookie# 1 2026-02-19T08:22:45.906077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488888901220210:2312][/dc-1] Sync is in 
progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:45.906125Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488888901220210:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888901220216:2312], cookie# 1 2026-02-19T08:22:45.906149Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608488888901220210:2312][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:45.906176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488888901220210:2312][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488888901220217:2312], cookie# 1 2026-02-19T08:22:45.906189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488888901220210:2312][/dc-1] Sync cookie mismatch: sender# [1:7608488888901220217:2312], cookie# 1, current cookie# 0 2026-02-19T08:22:45.906252Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488888901219941:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:22:45.916220Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488888901219941:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488888901220210:2312] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:45.916342Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488888901219941:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488888901220210:2312] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:45.919093Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488888901220375:2434], recipient# [1:7608488888901220374:2433], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:45.919161Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# 
[1:7608488888901220374:2433] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:45.964816Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608488888901220374:2433] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:22:45.973578Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608488888901220374:2433] Handle TEvDescribeSchemeResult Forward to# [1:7608488888901220373:2432] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:22:45.990028Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608488888901219927:2143] Handle TEvProposeTransaction 2026-02-19T08:22:45.990056Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608488888901219927:2143] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:22:45.990124Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608488888901219927:2143] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608488888901220383:2441] 2026-02-19T08:22:46.106850Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608488888901220383:2441] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage- ... 
608488888901219570:2050] 2026-02-19T08:22:49.938890Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7608488906081090512:3136][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7608488906081090525:3136] 2026-02-19T08:22:49.938938Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7608488906081090512:3136][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7608488888901219941:2146], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:49.938960Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488888901219570:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7608488906081090528:3136] 2026-02-19T08:22:49.945889Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488888901219941:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488906081090512:3136] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:49.949851Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488906081090532:3138], recipient# [1:7608488906081090508:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.173881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488888901219718:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:50.173961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:50.226607Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488888901219941:2146], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.226744Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488888901219941:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488893196187737:2486] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:50.226826Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488910376057838:3144], recipient# [1:7608488910376057837:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.955409Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488888901219941:2146], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.955556Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488888901219941:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488906081090512:3136] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:50.955832Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488910376057856:3149], recipient# [1:7608488910376057853:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.180208Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488888901219941:2146], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.180356Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488888901219941:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488893196187737:2486] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:51.180444Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488914671025158:3151], recipient# [1:7608488914671025157:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.230321Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488888901219941:2146], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.230484Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488888901219941:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488893196187737:2486] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:51.230566Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488914671025160:3152], recipient# [1:7608488914671025159:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.956396Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488888901219941:2146], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.956515Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488888901219941:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488906081090512:3136] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:51.956584Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488914671025177:3156], recipient# [1:7608488914671025176:2342], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TStorageTenantTest::LsLs [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Tuple >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2026-02-19T08:22:47.069292Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488899647309302:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:47.069370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:47.399051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:47.429079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:47.429178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:47.441396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:47.522229Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:47.620968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:48.109330Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488899647309517:2145], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 
2026-02-19T08:22:48.109411Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7608488899647309517:2145], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-02-19T08:22:48.109647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7608488903942277263:2438][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:22:48.116223Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7608488895352341858:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7608488903942277267:2438] 2026-02-19T08:22:48.116249Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7608488895352341861:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7608488903942277268:2438] 2026-02-19T08:22:48.116270Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7608488895352341858:2050] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-02-19T08:22:48.116282Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7608488895352341861:2053] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-02-19T08:22:48.116372Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7608488895352341861:2053] Subscribe: subscriber# [1:7608488903942277268:2438], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-02-19T08:22:48.116380Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7608488895352341858:2050] Subscribe: subscriber# [1:7608488903942277267:2438], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-02-19T08:22:48.116439Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7608488895352341864:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7608488903942277269:2438] 2026-02-19T08:22:48.116448Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7608488895352341864:2056] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-02-19T08:22:48.116455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7608488903942277267:2438][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488895352341858:2050] 2026-02-19T08:22:48.116473Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7608488895352341864:2056] Subscribe: subscriber# [1:7608488903942277269:2438], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-02-19T08:22:48.116479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7608488903942277268:2438][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488895352341861:2053] 2026-02-19T08:22:48.116538Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488895352341858:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7608488903942277267:2438] 
2026-02-19T08:22:48.116539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7608488903942277269:2438][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488895352341864:2056] 2026-02-19T08:22:48.116557Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488895352341861:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7608488903942277268:2438] 2026-02-19T08:22:48.116574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7608488903942277263:2438][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488903942277264:2438] 2026-02-19T08:22:48.116583Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488895352341864:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7608488903942277269:2438] 2026-02-19T08:22:48.116638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7608488903942277263:2438][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488903942277265:2438] 2026-02-19T08:22:48.116675Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608488903942277263:2438][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7608488899647309517:2145], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:48.116705Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7608488903942277263:2438][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488903942277266:2438] 2026-02-19T08:22:48.116736Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7608488903942277263:2438][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7608488899647309517:2145], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:48.116782Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488899647309517:2145], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2026-02-19T08:22:48.116882Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488899647309517:2145], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7608488903942277263:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:48.117631Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488899647309511:2143] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:48.117734Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488903942277271:2440] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:48.137850Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:48.144547Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488899647309517:2145], cacheItem# { Subscriber: { Subscriber: [1:7608488903942277263:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:48.144938Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488899647309517:2145], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:48.145056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488899647309742:2289][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488899647309517:2145], cookie# 1 2026-02-19T08:22:48.145124Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488899647309796:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899647309793:2289], cookie# 1 2026-02-19T08:22:48.145158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488899647309797:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899647309794:2289], cookie# 1 2026-02-19T08:22:48.154057Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488903942277272:2441], recipient# [1:7608488903942277262:2291], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:48.154158Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488895352341858:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899647309796:2289], cookie# 1 2026-02-19T08:22:48.154195Z node 1 : ... 
81474976715661 2026-02-19T08:22:48.806950Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:7608488899647309871:2377] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 7 }: sender# [1:7608488895352341861:2053], cookie# 281474976715661 2026-02-19T08:22:48.806977Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:7608488899647309872:2378] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] Version: 1 }: sender# [1:7608488895352341864:2056], cookie# 281474976715661 2026-02-19T08:22:48.807000Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:7608488899647309872:2378] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 7 }: sender# [1:7608488895352341864:2056], cookie# 281474976715661 2026-02-19T08:22:48.807024Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:7608488899647309863:2371] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] Version: 1 }: sender# [1:7608488899647309870:2376], cookie# 281474976715661 2026-02-19T08:22:48.807046Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:7608488899647309863:2371] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 7 }: sender# [1:7608488899647309870:2376], cookie# 281474976715661 2026-02-19T08:22:48.807086Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:7608488899647309863:2371] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] Version: 1 }: sender# [1:7608488899647309871:2377], cookie# 281474976715661 2026-02-19T08:22:48.807104Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608488899647309863:2371] Ack update: ack to# [1:7608488899647309617:2202], cookie# 281474976715661, pathId# [OwnerId: 72057594046644480, LocalPathId: 4], version# 1 2026-02-19T08:22:48.807126Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:7608488899647309863:2371] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 7 }: sender# [1:7608488899647309871:2377], cookie# 281474976715661 2026-02-19T08:22:48.807136Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608488899647309863:2371] Ack update: ack to# [1:7608488899647309617:2202], cookie# 281474976715661, pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 7 2026-02-19T08:22:48.807177Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:7608488899647309863:2371] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 4] Version: 1 }: sender# [1:7608488899647309872:2378], cookie# 281474976715661 2026-02-19T08:22:48.807186Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:7608488899647309863:2371] Ack for unknown update (already acked?): sender# [1:7608488899647309872:2378], cookie# 281474976715661 2026-02-19T08:22:48.807225Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:7608488899647309863:2371] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 
72057594046644480, LocalPathId: 2] Version: 7 }: sender# [1:7608488899647309872:2378], cookie# 281474976715661 2026-02-19T08:22:48.807233Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:7608488899647309863:2371] Ack for unknown update (already acked?): sender# [1:7608488899647309872:2378], cookie# 281474976715661 2026-02-19T08:22:48.807303Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488895352341858:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 7 }: sender# [1:7608488903942277515:2627] 2026-02-19T08:22:48.807319Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488895352341861:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 7 }: sender# [1:7608488903942277516:2627] 2026-02-19T08:22:48.807318Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488899647309517:2145], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1771489368509 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "dir" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1771489368593 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_USER_0_kind_storage-pool-number-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_storage-pool-number-2" Kind: "storage-pool-number-2" } StoragePools { Name: "name_USER_0_kind_test" Kind: "test" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046644480 } 2026-02-19T08:22:48.807331Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7608488895352341864:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 7 }: sender# [1:7608488903942277517:2627] 2026-02-19T08:22:48.807572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715661 2026-02-19T08:22:48.807629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715661 2026-02-19T08:22:48.807638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715661 2026-02-19T08:22:48.807648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2026-02-19T08:22:48.807659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2026-02-19T08:22:48.807787Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488899647309517:2145], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1771489368509 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "dir" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1771489368593 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_USER_0_kind_storage-pool-number-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_storage-pool-number-2" Kind: "storage-pool-number-2" } StoragePools { Name: "name_USER_0_kind_test" Kind: "test" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } 
PathId: 2 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [1:7608488903942277511:2627] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1771489368509 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [1:7608488903942277511:2627] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 6 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1771489368509 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2026-02-19T08:22:48.807838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715661 2026-02-19T08:22:48.807924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::TopQueriesOrderByDesc [GOOD] Test command err: 2026-02-19T08:22:41.020005Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488872938183260:2272];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:41.020051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:41.039428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:41.105923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:41.110809Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488872948772133:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:41.110867Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:41.578670Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.573883Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.574081Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:22:41.656988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.792520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.792617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.793878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.793989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.794858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.794929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.799581Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:41.799085Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.813269Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.876406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.889093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.896225Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:22:41.896275Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:22:41.904317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.904713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:42.053531Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:42.136130Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:42.204025Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:42.225804Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:42.228971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:42.229004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:42.229012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:42.229130Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:42.271318Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:42.646451Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:42.846659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:42.974239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.218126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.485082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.622374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:45.514600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488890118054501:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.514728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.521858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488890118054511:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.521972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:45.874356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:45.983036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.021814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488872938183260:2272];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.021877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:46.110900Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608488872948772133:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.110964Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:46.108938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.240092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.332124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.459447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.524651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:22:46.608976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:46.789194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488894413022915:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:46.789290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:46.789784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488894413022920:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:46.789832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488894413022921:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:46.789962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:46.799604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:46.835469Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488894413022924:2422], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:22:46.921887Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488894413023003:4774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> TxUsage::WriteToTopic_Demo_16_Table [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> KqpResultSetFormats::ArrowFormat_ColumnOrder [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_None ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2026-02-19T08:22:46.341886Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.010732s 2026-02-19T08:22:46.358923Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488892207879940:2270];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.358979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:46.843724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:46.924633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:46.924759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:46.969913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:47.046950Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:47.062460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:47.361758Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:47.434793Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488892207879948:2144] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:47.434865Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488896502847712:2446] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:47.434969Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488892207879954:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:47.435061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: 
[main][1:7608488892207880182:2293][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488892207879954:2146], cookie# 1 2026-02-19T08:22:47.436522Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488892207880235:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488892207880232:2293], cookie# 1 2026-02-19T08:22:47.436556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488892207880236:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488892207880233:2293], cookie# 1 2026-02-19T08:22:47.436570Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488892207880237:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488892207880234:2293], cookie# 1 2026-02-19T08:22:47.436602Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488887912912293:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488892207880235:2293], cookie# 1 2026-02-19T08:22:47.436629Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488887912912296:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488892207880236:2293], cookie# 1 2026-02-19T08:22:47.436644Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488887912912299:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488892207880237:2293], cookie# 1 2026-02-19T08:22:47.436682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488892207880235:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488887912912293:2051], cookie# 1 2026-02-19T08:22:47.436699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488892207880236:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488887912912296:2054], cookie# 1 2026-02-19T08:22:47.436714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488892207880237:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488887912912299:2057], cookie# 1 2026-02-19T08:22:47.436788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488892207880182:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488892207880232:2293], cookie# 1 2026-02-19T08:22:47.436813Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488892207880182:2293][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:47.436835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488892207880182:2293][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488892207880233:2293], cookie# 1 2026-02-19T08:22:47.436861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608488892207880182:2293][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:47.436887Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488892207880182:2293][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488892207880234:2293], cookie# 1 2026-02-19T08:22:47.436899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488892207880182:2293][/dc-1] Sync cookie mismatch: sender# [1:7608488892207880234:2293], cookie# 1, current cookie# 0 2026-02-19T08:22:47.436956Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488892207879954:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:22:47.443820Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488892207879954:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488892207880182:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:47.443948Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488892207879954:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488892207880182:2293] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:47.452263Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488896502847713:2447], recipient# [1:7608488896502847712:2446], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:47.452370Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608488896502847712:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:47.507126Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608488896502847712:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:22:47.518612Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608488896502847712:2446] Handle TEvDescribeSchemeResult Forward to# [1:7608488896502847711:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:22:47.520289Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608488892207879948:2144] Handle TEvProposeTransaction 2026-02-19T08:22:47.520311Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608488892207879948:2144] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:22:47.520375Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608488892207879948:2144] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608488896502847718:2451] 2026-02-19T08:22:47.687560Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608488896502847718:2451] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain ... 
:7608488905092782831:2855] Handle TEvDescribeSchemeResult Forward to# [1:7608488905092782830:2854] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1771489369700 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 
RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046644480 2026-02-19T08:22:50.026590Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7608488887912912293:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7608488903922585597:2103] 2026-02-19T08:22:50.026636Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7608488887912912293:2051] Unsubscribe: subscriber# [3:7608488903922585597:2103], path# /dc-1/USER_0 2026-02-19T08:22:50.026674Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7608488887912912296:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7608488903922585598:2103] 2026-02-19T08:22:50.026685Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7608488887912912296:2054] Unsubscribe: subscriber# [3:7608488903922585598:2103], path# /dc-1/USER_0 2026-02-19T08:22:50.026704Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7608488887912912299:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7608488903922585599:2103] 2026-02-19T08:22:50.026713Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7608488887912912299:2057] Unsubscribe: subscriber# [3:7608488903922585599:2103], path# /dc-1/USER_0 2026-02-19T08:22:50.027735Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-02-19T08:22:50.028866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:22:50.741884Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488903922585704:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.742003Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: 
FillEntry for TNavigate: self# [3:7608488903922585704:2107], cacheItem# { Subscriber: { Subscriber: [3:7608488908217553344:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:50.742086Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488912512520719:2354], recipient# [3:7608488912512520718:2317], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.747398Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488903922585704:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.747508Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608488903922585704:2107], cacheItem# { Subscriber: { Subscriber: [3:7608488908217553344:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:51.747597Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488916807488017:2355], recipient# [3:7608488916807488016:2318], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:52.750156Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488903922585704:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:52.750274Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608488903922585704:2107], cacheItem# { Subscriber: { Subscriber: [3:7608488908217553344:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:52.750358Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488921102455315:2356], recipient# [3:7608488921102455314:2319], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TxUsage::WriteToTopic_Demo_16_Query >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2026-02-19T08:22:47.264567Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488899032297252:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:47.264836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:47.617207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:47.646649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:47.646754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:47.667738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:47.749605Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:47.783651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:48.095115Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488899032297455:2144] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:48.095166Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488903327265210:2440] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:48.095333Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488899032297462:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:48.095458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488899032297689:2290][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488899032297462:2147], cookie# 1 2026-02-19T08:22:48.096937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488899032297744:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899032297741:2290], cookie# 1 2026-02-19T08:22:48.096975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488899032297745:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899032297742:2290], cookie# 1 2026-02-19T08:22:48.096989Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488899032297746:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899032297743:2290], cookie# 1 2026-02-19T08:22:48.097025Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488894737329800:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899032297744:2290], cookie# 1 2026-02-19T08:22:48.097049Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488894737329803:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899032297745:2290], cookie# 1 2026-02-19T08:22:48.097063Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488894737329806:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899032297746:2290], cookie# 1 2026-02-19T08:22:48.097109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488899032297744:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894737329800:2051], cookie# 1 2026-02-19T08:22:48.097131Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488899032297745:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894737329803:2054], cookie# 1 2026-02-19T08:22:48.097146Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488899032297746:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894737329806:2057], cookie# 1 2026-02-19T08:22:48.097187Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488899032297689:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899032297741:2290], cookie# 1 2026-02-19T08:22:48.097246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488899032297689:2290][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:48.097268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488899032297689:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899032297742:2290], cookie# 1 2026-02-19T08:22:48.097296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:7608488899032297689:2290][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:48.097330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488899032297689:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899032297743:2290], cookie# 1 2026-02-19T08:22:48.097341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488899032297689:2290][/dc-1] Sync cookie mismatch: sender# [1:7608488899032297743:2290], cookie# 1, current cookie# 0 2026-02-19T08:22:48.097410Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488899032297462:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:22:48.102729Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488899032297462:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488899032297689:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:48.102881Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488899032297462:2147], cacheItem# { Subscriber: { Subscriber: [1:7608488899032297689:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:48.105320Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488903327265211:2441], recipient# [1:7608488903327265210:2440], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:48.105372Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608488903327265210:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:48.138937Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608488903327265210:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:22:48.143195Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608488903327265210:2440] Handle 
TEvDescribeSchemeResult Forward to# [1:7608488903327265209:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:22:48.146939Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608488899032297455:2144] Handle TEvProposeTransaction 2026-02-19T08:22:48.146962Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608488899032297455:2144] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:22:48.147023Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608488899032297455:2144] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7608488903327265217:2446] 2026-02-19T08:22:48.255324Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:7608488903327265217:2446] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-02-19T08:22:48.255369Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# ... 
serveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046644480 } 2026-02-19T08:22:50.706966Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7608488907294935224:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1771489368390 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1771489370650 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_USER_0_kind_storage-pool-number-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_storage-pool-number-2" Kind: "storage-pool-number-2" } StoragePools { Name: "name_USER_0_kind_test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [3:7608488911589902529:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1771489368390 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [3:7608488911589902529:2112] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1771489368390 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2026-02-19T08:22:50.945424Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:50.954511Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488907294935224:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.954557Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [3:7608488907294935224:2107], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-02-19T08:22:50.955385Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:7608488911589902910:2342][/dc-1/USER_0/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-02-19T08:22:50.960181Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7608488911589902915:2342][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488894737329800:2051] 2026-02-19T08:22:50.960222Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7608488911589902916:2342][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488894737329803:2054] 2026-02-19T08:22:50.960272Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7608488911589902917:2342][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7608488894737329806:2057] 2026-02-19T08:22:50.960314Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7608488911589902910:2342][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7608488911589902912:2342] 2026-02-19T08:22:50.960362Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7608488911589902910:2342][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7608488911589902913:2342] 2026-02-19T08:22:50.960407Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7608488911589902910:2342][/dc-1/USER_0/.metadata/initialization/migrations] Set up state: owner# [3:7608488907294935224:2107], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:50.960431Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][3:7608488911589902910:2342][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7608488911589902914:2342] 2026-02-19T08:22:50.960473Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7608488911589902910:2342][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7608488907294935224:2107], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:50.960571Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7608488907294935224:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 1 } 2026-02-19T08:22:50.960647Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7608488907294935224:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7608488911589902910:2342] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:50.960755Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608488907294935224:2107], cacheItem# { Subscriber: { Subscriber: [3:7608488911589902910:2342] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:50.960841Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488911589902918:2344], recipient# [3:7608488911589902906:2316], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:50.723288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2026-02-19T08:22:51.962330Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488907294935224:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:51.962433Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: 
self# [3:7608488907294935224:2107], cacheItem# { Subscriber: { Subscriber: [3:7608488911589902910:2342] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:51.962499Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488915884870248:2361], recipient# [3:7608488915884870247:2317], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2026-02-19T08:22:48.041595Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488903759216682:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:48.041889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:48.159824Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:48.579716Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:48.579862Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:22:48.601991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:48.659526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:48.659677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:48.667406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:48.667480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:48.675716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:48.686571Z node 1 :HIVE WARN: hive_impl.cpp:818: 
HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:22:48.687397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:48.730464Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:48.809963Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:48.820094Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:48.981535Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488903759216807:2145] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:48.981587Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488903759217230:2419] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:48.981681Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488903759216818:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:48.981775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488903759217043:2297][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488903759216818:2148], cookie# 1 2026-02-19T08:22:48.983433Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488903759217104:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488903759217101:2297], cookie# 1 2026-02-19T08:22:48.983486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488903759217105:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488903759217102:2297], cookie# 1 2026-02-19T08:22:48.983504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488903759217106:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488903759217103:2297], cookie# 1 2026-02-19T08:22:48.983535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488899464249148:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488903759217104:2297], cookie# 1 2026-02-19T08:22:48.983565Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488899464249151:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488903759217105:2297], cookie# 1 2026-02-19T08:22:48.983583Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488899464249154:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488903759217106:2297], cookie# 1 2026-02-19T08:22:48.983625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488903759217104:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899464249148:2052], cookie# 1 2026-02-19T08:22:48.983650Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488903759217105:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899464249151:2055], cookie# 1 2026-02-19T08:22:48.983690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488903759217106:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899464249154:2058], cookie# 1 2026-02-19T08:22:48.983733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488903759217043:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488903759217101:2297], cookie# 1 2026-02-19T08:22:48.983775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488903759217043:2297][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:48.983797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488903759217043:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488903759217102:2297], cookie# 1 2026-02-19T08:22:48.983824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608488903759217043:2297][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:48.983850Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488903759217043:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488903759217103:2297], cookie# 1 2026-02-19T08:22:48.983864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488903759217043:2297][/dc-1] Sync cookie mismatch: sender# [1:7608488903759217103:2297], cookie# 1, current cookie# 0 2026-02-19T08:22:48.983925Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488903759216818:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:22:48.989967Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488903759216818:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488903759217043:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:48.990086Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488903759216818:2148], cacheItem# { Subscriber: { Subscriber: [1:7608488903759217043:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown 
Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:48.993083Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488903759217231:2420], recipient# [1:7608488903759217230:2419], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:48.993145Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608488903759217230:2419] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:49.040141Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608488903759217230:2419] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:22:49.044685Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608488903759217230:2419] Handle TEvDescribeSchemeResult Forward to# [1:7608488903759217229:2418] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 10 ... 
wn Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:53.982691Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488902511642541:2110], cacheItem# { Subscriber: { Subscriber: [2:7608488923986479215:2129] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:53.982749Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488902511642541:2110], cacheItem# { Subscriber: { Subscriber: [2:7608488923986479216:2130] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:53.982878Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608488923986479289:2137], recipient# [2:7608488923986479211:2303], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:53.985978Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7608488923986479211:2303], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:22:54.070426Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479217:2131][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7608488923986479230:2131] 2026-02-19T08:22:54.070510Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479216:2130][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7608488923986479224:2130] 2026-02-19T08:22:54.070534Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479217:2131][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:54.070570Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479216:2130][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:54.070573Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479217:2131][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7608488923986479231:2131] 2026-02-19T08:22:54.070598Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479217:2131][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:54.070605Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479216:2130][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7608488923986479225:2130] 2026-02-19T08:22:54.070619Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479217:2131][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7608488923986479232:2131] 2026-02-19T08:22:54.070625Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479216:2130][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
2026-02-19T08:22:54.070642Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479217:2131][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:54.070642Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479216:2130][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7608488923986479226:2130] 2026-02-19T08:22:54.070670Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479216:2130][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:54.082373Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479215:2129][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7608488923986479218:2129] 2026-02-19T08:22:54.082445Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479215:2129][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:54.082466Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479215:2129][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7608488923986479219:2129] 2026-02-19T08:22:54.082487Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479215:2129][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:54.082504Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7608488923986479215:2129][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7608488923986479220:2129] 2026-02-19T08:22:54.082633Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7608488923986479215:2129][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7608488902511642541:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:54.432970Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: 
self# [2:7608488902511642541:2110], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:54.433115Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488902511642541:2110], cacheItem# { Subscriber: { Subscriber: [2:7608488923986479217:2131] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:54.433218Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608488928281446596:2138], recipient# [2:7608488928281446595:2308], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:54.433350Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> TConsoleTests::TestRestartConsoleAndPools >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TConsoleConfigHelpersTests::TestConfigCourier >> KqpKnn::Uint8VectorKnnPushdown [GOOD] >> TConsoleConfigTests::TestModifyConfigItem >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> KqpKnn::BitVectorKnnPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2026-02-19T08:22:46.026421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488894418329827:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.028514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:46.070622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:46.373967Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:46.399547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:46.399648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:46.408866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:46.509194Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:46.514424Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488890123362486:2081] 1771489366016315 != 1771489366016318 2026-02-19T08:22:46.621827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:46.789400Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488894418330028:2101] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:46.789452Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488894418330551:2433] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:46.789545Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488894418330071:2125], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:46.789641Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488894418330368:2316][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488894418330071:2125], cookie# 1 2026-02-19T08:22:46.791178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488894418330402:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894418330399:2316], cookie# 1 2026-02-19T08:22:46.791224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488894418330403:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894418330400:2316], cookie# 1 2026-02-19T08:22:46.791241Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488894418330404:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894418330401:2316], cookie# 1 2026-02-19T08:22:46.791362Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488890123362454:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894418330402:2316], cookie# 1 2026-02-19T08:22:46.791395Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488890123362457:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488894418330403:2316], cookie# 1 2026-02-19T08:22:46.791413Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488890123362460:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 
}: sender# [1:7608488894418330404:2316], cookie# 1 2026-02-19T08:22:46.791470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488894418330402:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488890123362454:2049], cookie# 1 2026-02-19T08:22:46.791497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488894418330403:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488890123362457:2052], cookie# 1 2026-02-19T08:22:46.791520Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488894418330404:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488890123362460:2055], cookie# 1 2026-02-19T08:22:46.791560Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488894418330368:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894418330399:2316], cookie# 1 2026-02-19T08:22:46.791583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488894418330368:2316][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:46.791607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488894418330368:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894418330400:2316], cookie# 1 2026-02-19T08:22:46.791633Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608488894418330368:2316][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:46.791661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488894418330368:2316][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488894418330401:2316], cookie# 1 2026-02-19T08:22:46.791677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488894418330368:2316][/dc-1] Sync cookie mismatch: sender# [1:7608488894418330401:2316], cookie# 1, current cookie# 0 2026-02-19T08:22:46.791728Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488894418330071:2125], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:22:46.803844Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488894418330071:2125], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488894418330368:2316] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:46.804013Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488894418330071:2125], cacheItem# { Subscriber: { Subscriber: [1:7608488894418330368:2316] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 
CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:46.812862Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488894418330552:2434], recipient# [1:7608488894418330551:2433], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:46.813481Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608488894418330551:2433] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:46.886905Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608488894418330551:2433] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:22:46.890445Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608488894418330551:2433] Handle TEvDescribeSchemeResult Forward to# [1:7608488894418330550:2432] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-02-19T08:22:46.891770Z 
node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7608488894418330028:2101] Handle TEvProposeTransaction 2026-02-19T08:22:46.891793Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7608488894418330028:2101] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:22:46.891848Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7608488894418330028:2101] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7608488894418330557:2438] 2026-02-19T08:22:47.029660Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache ... er# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.940531Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177944:2744][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7608488932997177954:2744] 2026-02-19T08:22:55.940557Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177944:2744][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.962482Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7608488915817307666:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:55.962655Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7608488915817307666:2105], cacheItem# { Subscriber: { Subscriber: [4:7608488932997177942:2742] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:55.962704Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7608488915817307666:2105], cacheItem# { Subscriber: { Subscriber: [4:7608488932997177943:2743] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 
SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:55.962817Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7608488932997177983:2747], recipient# [4:7608488932997177937:2368], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:55.963533Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7608488932997177937:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:22:55.974872Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177942:2742][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7608488932997177945:2742] 2026-02-19T08:22:55.974968Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177942:2742][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.975006Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177942:2742][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7608488932997177946:2742] 2026-02-19T08:22:55.975034Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177942:2742][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.975052Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177942:2742][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7608488932997177947:2742] 2026-02-19T08:22:55.975075Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177942:2742][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.975188Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177943:2743][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7608488932997177955:2743] 2026-02-19T08:22:55.975213Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177943:2743][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.975256Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177943:2743][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7608488932997177956:2743] 2026-02-19T08:22:55.975285Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: 
subscriber.cpp:867: [main][4:7608488932997177943:2743][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.975308Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177943:2743][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7608488932997177953:2743] 2026-02-19T08:22:55.975336Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177943:2743][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.982033Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177944:2744][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7608488932997177951:2744] 2026-02-19T08:22:55.982111Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177944:2744][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.982160Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177944:2744][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7608488932997177952:2744] 2026-02-19T08:22:55.982188Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177944:2744][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:22:55.982210Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7608488932997177944:2744][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7608488932997177954:2744] 2026-02-19T08:22:55.982269Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7608488932997177944:2744][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7608488915817307666:2105], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 
Version: DomainId: AbandonedSchemeShards: there are 0 elements } |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> Donor::SkipBadDonor |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> KqpScanLogs::GraceJoin+EnabledLogs [GOOD] >> KqpScanLogs::GraceJoin-EnabledLogs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Uint8VectorKnnPushdown [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:22:45.334432Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:45.452769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:22:45.471923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:22:45.472017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:22:45.472570Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:22:45.777200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:45.777380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:45.873546Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489362413260 != 1771489362413264 2026-02-19T08:22:45.888518Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:45.938510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:46.089279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:22:46.486466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:46.486531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:46.486576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:46.487078Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:46.499769Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:46.861138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:46.950755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:47.237945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:47.614535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:47.955565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:48.882305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1910:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:48.882514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:48.883587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:48.883707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:48.916399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:49.122686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:49.367552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:49.706952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:49.998849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:50.282424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:50.569657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:50.940805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:51.371302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.371415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.371934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2804:4183], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.372010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.372081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2807:4186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.378767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:51.542183Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2809:4188], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:22:51.617220Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2864:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:54.149549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TLocksTest::Range_BrokenLock2 >> TFlatTest::CopyTableAndCompareColumnsSchema >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TFlatTest::SelectRangeForbidNullArgs2 >> KqpResultSetFormats::ArrowFormat_Stress [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::BitVectorKnnPushdown [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:22:45.101700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:45.216072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:22:45.237351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:22:45.237447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:22:45.238033Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:22:45.533144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:45.533310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:45.624180Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489362167040 != 1771489362167044 2026-02-19T08:22:45.642108Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:45.695001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:45.821325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:22:46.335857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:46.335955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:46.336006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:46.336526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:46.348347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:46.735198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:46.878726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:47.169494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:47.576297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:47.911620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:48.931940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1915:3518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:48.932261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:48.933011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1988:3537], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:48.933116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:48.969765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:49.196196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:49.498895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:49.860006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:50.225853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:50.582610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:50.955228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:51.370411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:51.887295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.887453Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.887793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2801:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.888231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2802:4180], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.888293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:51.893546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:52.095316Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2805:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:22:52.166479Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2865:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:54.647770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TFlatTest::SelectRangeNullArgs3 >> TFlatTest::WriteSplitByPartialKeyAndRead >> Donor::ContinueWithFaultyDonor |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] >> Donor::ConsistentWritesWhenSwitchingToDonorMode >> BasicUsage::CreateTopicWithStreamingConsumer [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping >> Donor::SkipBadDonor [GOOD] >> TxUsage::WriteToTopic_Demo_40_Table [GOOD] |93.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |93.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |93.0%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> BasicUsage::SimpleBlockingKeyedWriteSession_ManyMessages [GOOD] >> Describe::LocationWithKillTablets >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/ynks/003708/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk7 2026-02-19T08:17:44.491327Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608487599359934725:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:44.491387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:17:44.575792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:17:45.101530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:17:45.101648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:17:45.104971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:17:45.199137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:17:45.233247Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:17:45.256564Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608487599359934684:2081] 1771489064484445 != 1771489064484448 2026-02-19T08:17:45.539999Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:17:45.549814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:17:45.549842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:17:45.549849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:17:45.549963Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:17:45.605797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:17:46.255595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:17:46.291209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:17:46.418877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.676526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:46.880583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:47.011426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:49.497834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608487599359934725:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:17:49.497916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:17:50.398728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487625129740350:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.398847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.405025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487625129740360:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.405129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:50.961053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.015342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.072374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.111382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.154827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.208254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.270270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.353688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:17:51.512734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487629424708536:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.512830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.513396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487629424708541:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.513475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608487629424708542:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.513597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:17:51.519584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:17:51.540554Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608487629424708545:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:17:51.614272Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608487629424708598:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:18:00.149809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:18:00.149846Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:42" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '730) '('"_id" '"7c730c0f-ca32a274-e609dd46-1c624064") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7 '1 '"HashV2")) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '686) '('"_id" '"fc6100c5-d0c0de-ec3c6f88-9c64d8ae") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '698) '('"_id" '"9157453c-7414738-bad66e80-28f5aa8e")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestValidation >> Donor::CheckOnlineReadRequestToDonor >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Tuple [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct >> BasicUsage::CreateTopicWithManyPartitions [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SkipBadDonor [GOOD] Test command err: RandomSeed# 8391947688295843218 2026-02-19T08:23:01.055115Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:01.057057Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13840532972960875026] 2026-02-19T08:23:01.085532Z 1 
00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> TxUsage::WriteToTopic_Demo_41_Table >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2026-02-19T08:22:46.390240Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488895323566252:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.396582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:46.681820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:46.737813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:46.737939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:46.751112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:46.923131Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:46.940087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:47.329997Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488895323566461:2144] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:47.330042Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488899618534220:2445] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:47.330145Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488895323566467:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:47.330232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488895323566695:2291][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488895323566467:2146], cookie# 1 2026-02-19T08:22:47.331727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488895323566750:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488895323566747:2291], cookie# 1 2026-02-19T08:22:47.331757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488895323566751:2291][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488895323566748:2291], cookie# 1 2026-02-19T08:22:47.331783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488895323566752:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488895323566749:2291], cookie# 1 2026-02-19T08:22:47.331821Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488895323566102:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488895323566750:2291], cookie# 1 2026-02-19T08:22:47.331849Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488895323566105:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488895323566751:2291], cookie# 1 2026-02-19T08:22:47.331864Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488895323566108:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488895323566752:2291], cookie# 1 2026-02-19T08:22:47.331916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488895323566750:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895323566102:2051], cookie# 1 2026-02-19T08:22:47.331933Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488895323566751:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895323566105:2054], cookie# 1 2026-02-19T08:22:47.331947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488895323566752:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895323566108:2057], cookie# 1 2026-02-19T08:22:47.331997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488895323566695:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895323566747:2291], cookie# 1 2026-02-19T08:22:47.332021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488895323566695:2291][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:47.332052Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488895323566695:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895323566748:2291], cookie# 1 2026-02-19T08:22:47.332076Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608488895323566695:2291][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:47.332109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488895323566695:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895323566749:2291], cookie# 1 2026-02-19T08:22:47.332123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488895323566695:2291][/dc-1] Sync cookie mismatch: sender# [1:7608488895323566749:2291], cookie# 1, current cookie# 0 2026-02-19T08:22:47.332191Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488895323566467:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 
PathId: Partial: 0 } 2026-02-19T08:22:47.344371Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488895323566467:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488895323566695:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:47.344512Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488895323566467:2146], cacheItem# { Subscriber: { Subscriber: [1:7608488895323566695:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:47.347039Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488899618534221:2446], recipient# [1:7608488899618534220:2445], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:47.347106Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608488899618534220:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:47.408599Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:47.408880Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488895323566467:2146], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:47.408954Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7608488895323566467:2146], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-02-19T08:22:47.409141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7608488899618534223:2447][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 
2026-02-19T08:22:47.409533Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7608488895323566102:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7608488899618534227:2447] 2026-02-19T08:22:47.409548Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7608488895323566102:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-02-19T08:22:47.409644Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7608488895323566102:2051] Subscribe: subscriber# [1:7608488899618534227:2447], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-02-19T08:22:47.409689Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7608488895323566105:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7608488899618534228:2447] 2026-02-19T08:22:47.409696Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7608488895323566105:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-02-19T08:22:47.409735Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7608488895323566105:2054] Subscribe: subscriber# [1:7608488899618534228:2447], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-02-19T08:22:47.409758Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7608488895323566108:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7608488899618534229:2447] 2026-02-19T08:22:47.409765Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7608488895323566108:2057] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-02-19T08:22:47.409789Z node 1 :SCHEME_BOARD_REPLICA INFO ... 
ata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:58.985120Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608488927286622952:2225], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:58.985273Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488927286622952:2225], cacheItem# { Subscriber: { Subscriber: [2:7608488944466492306:2307] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:58.985340Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488927286622952:2225], cacheItem# { Subscriber: { Subscriber: [2:7608488944466492307:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:58.985483Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608488944466492442:2355], recipient# [2:7608488944466492439:2561], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:58.990251Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 
2026-02-19T08:22:59.041926Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7608488901718887363:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:59.042053Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7608488901718887363:2231], cacheItem# { Subscriber: { Subscriber: [3:7608488918898756720:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:59.042135Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488948963535854:5003], recipient# [3:7608488948963535853:4339], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:59.489961Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608488927286622461:2221];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:59.490055Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:22:59.538429Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608488927286622952:2225], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:59.538545Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488927286622952:2225], cacheItem# { Subscriber: { Subscriber: [2:7608488944466492305:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:59.538612Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: 
cache.cpp:268: Send result: self# [2:7608488948761459740:2356], recipient# [2:7608488948761459739:2564], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:59.554235Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608488927286622952:2225], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:59.554484Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488927286622952:2225], cacheItem# { Subscriber: { Subscriber: [2:7608488944466492305:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:59.554538Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608488927286622952:2225], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:59.554584Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488927286622952:2225], cacheItem# { Subscriber: { Subscriber: [2:7608488944466492308:2309] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:22:59.554638Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608488948761459743:2357], recipient# [2:7608488948761459741:2565], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: 
KindUnknown DomainInfo }] } 2026-02-19T08:22:59.554678Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608488948761459744:2358], recipient# [2:7608488948761459742:2566], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:23:00.490552Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608488927286622952:2225], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> Donor::ContinueWithFaultyDonor [GOOD] >> Donor::MultipleEvicts >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongName |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |93.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query >> Donor::CheckOnlineReadRequestToDonor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2026-02-19T08:22:46.516291Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488895165014358:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.539268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:46.639455Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488892860285475:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:46.639509Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:47.309799Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:47.349836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:47.404926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:47.405046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:47.415385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:47.415482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:47.434580Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:22:47.434769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:47.457370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:47.570277Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:47.711329Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:47.745336Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:47.745666Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:47.779058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:48.442688Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7608488895165014559:2145] Handle TEvNavigate describe path dc-1 2026-02-19T08:22:48.442745Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7608488903754949635:2456] HANDLE EvNavigateScheme dc-1 2026-02-19T08:22:48.442840Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7608488895165014566:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:22:48.442928Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7608488899459982094:2296][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7608488895165014566:2147], cookie# 1 2026-02-19T08:22:48.444674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488899459982153:2296][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899459982150:2296], cookie# 1 2026-02-19T08:22:48.444707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488899459982154:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899459982151:2296], cookie# 1 2026-02-19T08:22:48.444726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7608488899459982155:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899459982152:2296], cookie# 1 2026-02-19T08:22:48.444758Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488895165014197:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899459982153:2296], cookie# 1 2026-02-19T08:22:48.444813Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488895165014200:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899459982154:2296], cookie# 1 2026-02-19T08:22:48.444833Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7608488895165014203:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7608488899459982155:2296], cookie# 1 2026-02-19T08:22:48.444878Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488899459982153:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895165014197:2052], cookie# 1 2026-02-19T08:22:48.444905Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488899459982154:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895165014200:2055], cookie# 1 2026-02-19T08:22:48.444930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7608488899459982155:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488895165014203:2058], cookie# 1 2026-02-19T08:22:48.444974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488899459982094:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899459982150:2296], cookie# 1 2026-02-19T08:22:48.444998Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7608488899459982094:2296][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-02-19T08:22:48.445035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488899459982094:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899459982151:2296], cookie# 1 2026-02-19T08:22:48.445072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7608488899459982094:2296][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-02-19T08:22:48.445102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7608488899459982094:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7608488899459982152:2296], cookie# 1 2026-02-19T08:22:48.445116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7608488899459982094:2296][/dc-1] Sync cookie mismatch: sender# 
[1:7608488899459982152:2296], cookie# 1, current cookie# 0 2026-02-19T08:22:48.445174Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7608488895165014566:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-02-19T08:22:48.452898Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7608488895165014566:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7608488899459982094:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-02-19T08:22:48.453171Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7608488895165014566:2147], cacheItem# { Subscriber: { Subscriber: [1:7608488899459982094:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-02-19T08:22:48.456404Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7608488903754949636:2457], recipient# [1:7608488903754949635:2456], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:22:48.456467Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7608488903754949635:2456] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:22:48.509571Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7608488903754949635:2456] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-02-19T08:22:48.513563Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7608488903754949635:2456] Handle TEvDescribeSchemeResult Forward to# [1:7608488903754949634:2455] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 ... r: { Subscriber: [3:7608488956378952477:2252] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:23:00.861995Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7608488956378952517:2256], recipient# [3:7608488956378952472:2564], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-02-19T08:23:00.862373Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7608488956378952472:2564], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:00.922056Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7608488956378952476:2251][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7608488956378952479:2251] 2026-02-19T08:23:00.922143Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7608488956378952476:2251][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7608488939199083136:2192], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:23:00.922147Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7608488956378952477:2252][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7608488956378952485:2252] 2026-02-19T08:23:00.922172Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7608488956378952476:2251][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7608488956378952480:2251] 2026-02-19T08:23:00.922194Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7608488956378952476:2251][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7608488939199083136:2192], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:23:00.922194Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7608488956378952477:2252][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7608488939199083136:2192], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:23:00.922217Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7608488956378952477:2252][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7608488956378952486:2252] 2026-02-19T08:23:00.922222Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7608488956378952476:2251][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7608488956378952481:2251] 2026-02-19T08:23:00.922240Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7608488956378952477:2252][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7608488939199083136:2192], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:23:00.922247Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: 
subscriber.cpp:867: [main][3:7608488956378952476:2251][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7608488939199083136:2192], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:23:00.922265Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7608488956378952477:2252][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7608488956378952487:2252] 2026-02-19T08:23:00.922287Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7608488956378952477:2252][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7608488939199083136:2192], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:23:01.510198Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608488892860285537:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:23:01.510335Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488892860285537:2107], cacheItem# { Subscriber: { Subscriber: [2:7608488922925056775:2129] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:23:01.510460Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608488957284795219:2155], recipient# [2:7608488957284795218:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:23:01.699978Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608488892860285537:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:23:01.700117Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: 
cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488892860285537:2107], cacheItem# { Subscriber: { Subscriber: [2:7608488897155252959:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:23:01.700238Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608488957284795221:2156], recipient# [2:7608488957284795220:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:23:01.834324Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7608488892860285537:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-02-19T08:23:01.834487Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7608488892860285537:2107], cacheItem# { Subscriber: { Subscriber: [2:7608488897155252959:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-02-19T08:23:01.834595Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7608488957284795223:2157], recipient# [2:7608488957284795222:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |93.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ContinueWithFaultyDonor [GOOD] Test command err: RandomSeed# 5099027561168743234 2026-02-19T08:23:03.473767Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 
2026-02-19T08:23:03.475733Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6307539505369885433] 2026-02-19T08:23:03.499222Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query [GOOD] |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |93.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower >> Donor::SlayAfterWiping [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::CheckOnlineReadRequestToDonor [GOOD] Test command err: RandomSeed# 14997072474886829081 2026-02-19T08:23:04.677884Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:04.680885Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13091416455016266482] 2026-02-19T08:23:04.703429Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:0:0:0:2097152:1] 2026-02-19T08:23:04.703661Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 1 PartsResurrected# 1 |93.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table >> TLocksTest::Range_GoodLock0 >> TLocksFatTest::RangeSetRemove |93.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 13686246237739906035 2026-02-19T08:23:04.573852Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:04.575937Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4089365395995963872] 2026-02-19T08:23:04.597831Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TFlatTest::ReadOnlyMode >> Secret::Simple [GOOD] |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |93.1%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> Donor::MultipleEvicts [GOOD] >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> Secret::SimpleQueryService [GOOD] >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> KqpResultSetFormats::ArrowFormat_Compression_None [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TLocksTest::CK_Range_BrokenLock >> TFlatTest::ShardFreezeRejectBadProtobuf >> TFlatTest::SelectRangeNullArgs4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:20:46.180090Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:46.332880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:20:46.353273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:46.353367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:20:46.354046Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:20:46.677978Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:46.678159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:46.779101Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489242456925 != 1771489242456929 2026-02-19T08:20:46.789984Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:46.843307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:46.944040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:20:47.532354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:47.532422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:47.532474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:47.540398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:47.541340Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:47.558138Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2026-02-19T08:20:59.749159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2877], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.749361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.749881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1051:2884], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.749960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.754372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:20:59.966369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1143:2957], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.966522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.967119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1147:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.967206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.967277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1150:2964], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:59.972449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:00.095293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1152:2966], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:21:00.364430Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1243:3030] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:00.985947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:01.491932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:02.135673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:02.907225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:03.467403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:04.728996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2026-02-19T08:21:05.169574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 2026-02-19T08:21:08.261130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:21:08.261202Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT 
OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-02-19T08:22:09.707698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715740:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:10.733568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715745:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:12.866280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:13.477084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: 
SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 7430133776705152377 0 donors: 2026-02-19T08:23:06.981310Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:06.982107Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.013118Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 2026-02-19T08:23:07.136112Z 25 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:07.136839Z 25 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.157540Z 25 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2026-02-19T08:23:07.236982Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:07.237752Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.252757Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 2026-02-19T08:23:07.342201Z 25 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:07.342942Z 25 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.359771Z 25 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2026-02-19T08:23:07.458654Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:07.459254Z 26 00h00m20.015120s :BS_SYNCER ERROR: 
PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.473417Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 2026-02-19T08:23:07.568635Z 25 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:07.569392Z 25 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.584684Z 25 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2026-02-19T08:23:07.705297Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:07.706103Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.721730Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 2026-02-19T08:23:07.824717Z 25 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:07.825518Z 25 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.842046Z 25 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2026-02-19T08:23:07.944401Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:07.945203Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12892878321193594077] 2026-02-19T08:23:07.961270Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:2:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 25:1000 >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> TFlatTest::RejectByPerShardReadSize >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |93.1%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency >> 
KqpResultSetFormats::ArrowFormat_Types_Dict_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:20:47.141078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:47.284155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:20:47.303942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:47.304035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:20:47.304637Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:20:47.624003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:47.624137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:47.717367Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489244098002 != 1771489244098006 2026-02-19T08:20:47.731099Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:47.778446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:47.898639Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:20:48.514236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:48.514305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:48.514352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:48.517140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:48.518097Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:48.533097Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2026-02-19T08:21:00.407545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1018:2872], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:00.407797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1033:2881], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:00.407906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:00.409421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1038:2886], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:00.409703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:00.414838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:00.438022Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1036:2884], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-02-19T08:21:00.494074Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1089:2918] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:00.748154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:01.736232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:02.189468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:03.121132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:03.958686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:04.431079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:05.617745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2026-02-19T08:21:06.070464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH 
value = `100`;RESULT=;EXPECTATION=1 2026-02-19T08:21:09.230217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:21:09.230289Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-02-19T08:22:10.444336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715736:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:11.679876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:13.748740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:14.502225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE 
SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> TCancelTx::CrossShardReadOnly >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2026-02-19T08:23:00.304467Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488952916900076:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:00.304834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:00.341087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:00.757769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:00.765317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:00.765410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:00.845047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:00.927307Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:01.065834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:01.226835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:01.234348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:01.313536Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:01.330089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:04.809694Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:04.809881Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:04.817587Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:04.821969Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:04.915597Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:04.929897Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:05.057822Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:05.331039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:05.346291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:05.370077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:05.380927Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:05.610258Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> 
TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2026-02-19T08:23:00.379387Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488955349036476:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:00.379460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:00.833852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:00.894421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:00.894540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:00.972838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:01.052539Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:01.061825Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488955349036252:2081] 1771489380358930 != 1771489380358933 2026-02-19T08:23:01.069539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:01.380110Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:01.436493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:01.444328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:23:01.577949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:05.238405Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488973885451393:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:05.238669Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:05.247477Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme 
cache: ActorUnknown 2026-02-19T08:23:05.465791Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:05.465996Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488973885451255:2081] 1771489385224233 != 1771489385224236 2026-02-19T08:23:05.494269Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:05.495670Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:05.495741Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:05.535742Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:05.717650Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:05.811119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:05.838042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:05.860471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:05.868897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TxUsage::WriteToTopic_Demo_16_Query [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> 
TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigTests::TestAddConfigItem >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2026-02-19T08:22:56.559522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:56.559610Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:56.800841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:58.305408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:58.305481Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:58.386945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:00.222449Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:00.222522Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:00.327339Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:01.861449Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:01.861537Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:02.093355Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:03.731164Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:03.731223Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:03.903234Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:05.584773Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:05.584848Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:05.686574Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:06.918297Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:06.918368Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:07.039220Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) E0219 08:23:08.606481283 355955 trace.cc:67] Unknown trace var: 'sdk_authz' 2026-02-19T08:23:08.607096Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CONFIGS has been changed from WARN to NOTICE 2026-02-19T08:23:08.607190Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CONFIGS has been changed from WARN to DEBUG 2026-02-19T08:23:08.607237Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CLUSTER has been changed from WARN to NOTICE 2026-02-19T08:23:08.607263Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CLUSTER has been changed from WARN to DEBUG 2026-02-19T08:23:08.607289Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_PROXY has been changed from WARN to NOTICE 2026-02-19T08:23:08.607315Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_PROXY has been changed from WARN to DEBUG 2026-02-19T08:23:08.607341Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_WORKER has been changed from WARN to NOTICE 2026-02-19T08:23:08.607368Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_WORKER has been changed from WARN to DEBUG 2026-02-19T08:23:08.607394Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_GATEWAY has been changed from WARN to NOTICE 2026-02-19T08:23:08.607417Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_GATEWAY has been changed from WARN to DEBUG 2026-02-19T08:23:08.607441Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_YQL has been changed from WARN to NOTICE 2026-02-19T08:23:08.607462Z node 8 :CMS_CONFIGS 
NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_YQL has been changed from WARN to DEBUG 2026-02-19T08:23:08.607488Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_EXECUTER has been changed from WARN to NOTICE 2026-02-19T08:23:08.607516Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_EXECUTER has been changed from WARN to DEBUG 2026-02-19T08:23:08.607539Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPUTE has been changed from WARN to NOTICE 2026-02-19T08:23:08.607565Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPUTE has been changed from WARN to DEBUG 2026-02-19T08:23:08.607590Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SLOW_LOG has been changed from WARN to NOTICE 2026-02-19T08:23:08.607615Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SLOW_LOG has been changed from WARN to DEBUG 2026-02-19T08:23:08.607641Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_SERVICE has been changed from WARN to NOTICE 2026-02-19T08:23:08.607667Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_SERVICE has been changed from WARN to DEBUG 2026-02-19T08:23:08.607693Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_ACTOR has been changed from WARN to NOTICE 2026-02-19T08:23:08.607722Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_ACTOR has been changed from WARN to DEBUG 2026-02-19T08:23:08.607746Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to NOTICE 2026-02-19T08:23:08.607771Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to DEBUG 2026-02-19T08:23:08.607798Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to NOTICE 2026-02-19T08:23:08.607825Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to DEBUG 2026-02-19T08:23:08.607850Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_BLOBS_STORAGE has been changed from WARN to NOTICE 2026-02-19T08:23:08.607876Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_BLOBS_STORAGE has been changed from WARN to DEBUG 2026-02-19T08:23:08.607903Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_REQUEST has 
been changed from WARN to NOTICE 2026-02-19T08:23:08.607931Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_REQUEST has been changed from WARN to DEBUG 2026-02-19T08:23:08.607971Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_NODE has been changed from WARN to NOTICE 2026-02-19T08:23:08.608001Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_NODE has been changed from WARN to DEBUG 2026-02-19T08:23:08.608030Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_LOAD_TEST has been changed from WARN to NOTICE 2026-02-19T08:23:08.608055Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_LOAD_TEST has been changed from WARN to DEBUG 2026-02-19T08:23:08.608083Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SESSION has been changed from WARN to NOTICE 2026-02-19T08:23:08.608109Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SESSION has been changed from WARN to DEBUG 2026-02-19T08:23:08.608142Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to NOTICE 2026-02-19T08:23:08.608170Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to DEBUG 2026-02-19T08:23:08.608194Z node 8 :CMS_CONFIGS NOTICE: log_settings_con ... 
EGRITY has been changed from WARN to ALERT 2026-02-19T08:23:11.730020Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2026-02-19T08:23:11.730048Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_CHECKINTEGRITY has been changed from 0 to 10 2026-02-19T08:23:11.730081Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2026-02-19T08:23:11.730113Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2026-02-19T08:23:11.730141Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_BRIDGE has been changed from 0 to 10 2026-02-19T08:23:11.730169Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2026-02-19T08:23:11.730199Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2026-02-19T08:23:11.730226Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_CLUSTER_BALANCING has been changed from 0 to 10 2026-02-19T08:23:11.730258Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2026-02-19T08:23:11.730287Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2026-02-19T08:23:11.730317Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_BRIDGE_SYNC has been changed from 0 to 10 2026-02-19T08:23:11.730348Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2026-02-19T08:23:11.730377Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2026-02-19T08:23:11.730403Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PHANTOM_FLAG_STORAGE has been changed from 0 to 10 2026-02-19T08:23:11.730435Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2026-02-19T08:23:11.730466Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2026-02-19T08:23:11.730494Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2026-02-19T08:23:11.730524Z node 11 :CMS_CONFIGS 
NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SASL_AUTH has been changed from WARN to ALERT 2026-02-19T08:23:11.730553Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SASL_AUTH has been changed from WARN to ALERT 2026-02-19T08:23:11.730585Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SASL_AUTH has been changed from 0 to 10 2026-02-19T08:23:11.730615Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2026-02-19T08:23:11.730645Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2026-02-19T08:23:11.730676Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2026-02-19T08:23:11.730708Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2026-02-19T08:23:11.730737Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2026-02-19T08:23:11.730762Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2026-02-19T08:23:11.730789Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TLI has been changed from WARN to ALERT 2026-02-19T08:23:11.730836Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TLI has been changed from WARN to ALERT 2026-02-19T08:23:11.730862Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TLI has been changed from 0 to 10 2026-02-19T08:23:11.730896Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2026-02-19T08:23:11.730926Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2026-02-19T08:23:11.730955Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2026-02-19T08:23:11.730988Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from WARN to ALERT 2026-02-19T08:23:11.731018Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from WARN to ALERT 2026-02-19T08:23:11.731048Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2026-02-19T08:23:11.731079Z node 11 :CMS_CONFIGS 
NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from WARN to ALERT 2026-02-19T08:23:11.731110Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from WARN to ALERT 2026-02-19T08:23:11.731138Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2026-02-19T08:23:11.731168Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BRIDGE has been changed from WARN to ALERT 2026-02-19T08:23:11.731197Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BRIDGE has been changed from WARN to ALERT 2026-02-19T08:23:11.731227Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BRIDGE has been changed from 0 to 10 2026-02-19T08:23:11.731259Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TRANSFER has been changed from WARN to ALERT 2026-02-19T08:23:11.731292Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TRANSFER has been changed from WARN to ALERT 2026-02-19T08:23:11.731320Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TRANSFER has been changed from 0 to 10 2026-02-19T08:23:11.731350Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2026-02-19T08:23:11.731378Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2026-02-19T08:23:11.731407Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TOKEN_MANAGER has been changed from 0 to 10 2026-02-19T08:23:11.731438Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2026-02-19T08:23:11.731464Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2026-02-19T08:23:11.731494Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LOCAL_DB_BACKUP has been changed from 0 to 10 2026-02-19T08:23:11.731526Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2026-02-19T08:23:11.731555Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2026-02-19T08:23:11.731584Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SCHEMA_SECRET_CACHE has been changed from 0 to 10 2026-02-19T08:23:11.731616Z node 11 :CMS_CONFIGS NOTICE: 
log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component NBS_PARTITION has been changed from WARN to ALERT 2026-02-19T08:23:11.731648Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component NBS_PARTITION has been changed from WARN to ALERT 2026-02-19T08:23:11.731676Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component NBS_PARTITION has been changed from 0 to 10 2026-02-19T08:23:11.731707Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component NBS2_LOAD_TEST has been changed from WARN to ALERT 2026-02-19T08:23:11.731737Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component NBS2_LOAD_TEST has been changed from WARN to ALERT 2026-02-19T08:23:11.731763Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component NBS2_LOAD_TEST has been changed from 0 to 10 2026-02-19T08:23:11.731892Z node 11 :CMS_CONFIGS TRACE: log_settings_configurator.cpp:100: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } ... waiting for config update (done) >> TFlatTest::AutoSplitBySize >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> TFlatTest::WriteSplitKillRead |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |93.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |93.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> KqpAnalyze::AnalyzeError [GOOD] >> KqpExplain::AggGroupLimit >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2026-02-19T08:23:00.834918Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488956104379922:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:00.834989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:01.437963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:01.501794Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:01.501882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:01.665838Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488956104379705:2081] 1771489380785659 != 1771489380785662 2026-02-19T08:23:01.702477Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:01.715842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:01.741828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:01.838197Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:02.240522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:02.247443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:23:02.321237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:02.708164Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:23:02.709842Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:23:02.709867Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:23:02.831520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976715668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:23:02.831762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2026-02-19T08:23:02.832093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 
2026-02-19T08:23:02.832124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:23:02.832407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-02-19T08:23:02.832439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715668:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-02-19T08:23:02.832658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2026-02-19T08:23:02.832699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:23:02.833563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715668, response: Status: StatusAccepted TxId: 281474976715668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:23:02.833673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2026-02-19T08:23:02.833818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2026-02-19T08:23:02.833861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-02-19T08:23:02.839211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976715668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:23:02.839440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976715668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } 
AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:23:02.839957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2026-02-19T08:23:02.840049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-02-19T08:23:02.840093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976715668, partId: 0, tablet: 72057594037968897 2026-02-19T08:23:02.840109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:2, partId: 0 2026-02-19T08:23:02.840119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:3, partId: 0 2026-02-19T08:23:02.842216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-02-19T08:23:02.842284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:2, partId: 0 2026-02-19T08:23:02.842416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-02-19T08:23:02.842445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976715668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:23:02.842486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:182: TCreateParts opId# 281474976715668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-02-19T08:23:02.842739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-02-19T08:23:02.842752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 281474976715668, shardIdx: 72057594046644480:3, partId: 0 2026-02-19T08:23:02.842852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-02-19T08:23:02.842875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 281474976715668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-02-19T08:23:02.842925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:182: TCreateParts opId# 281474976715668:0 
HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-02-19T08:23:02.842971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715668:0 CreateParts -> ConfigureParts 2026-02-19T08:23:02.843206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply> TLocksTest::Range_IncorrectDot1 >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TLocksTest::Range_BrokenLock0 >> TLocksFatTest::RangeSetBreak >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink >> TStreamingQueryTest::AlterStreamingQuery >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> TOlap::CreateStore >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> TStreamingQueryTest::AlterStreamingQuery [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict >> TxUsage::WriteToTopic_Demo_41_Table [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> Describe::LocationWithKillTablets [GOOD] >> Describe::DescribePartitionPermissions >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table [GOOD] >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> TxUsage::WriteToTopic_Demo_41_Query >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] >> 
TxUsage::WriteToTopic_Demo_23_RestartNo_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2026-02-19T08:23:09.671425Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488994543832587:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:09.671824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:09.695997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:10.192665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:10.192770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:10.246551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:10.273312Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488994543832545:2081] 1771489389661899 != 1771489389661902 2026-02-19T08:23:10.276711Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:10.291427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:10.426047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:10.565108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:10.640600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:10.701422Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Error 128: Mix freeze cmd with other options is forbidden 2026-02-19T08:23:10.814711Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488998838800762:2565] txid# 281474976715659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 2026-02-19T08:23:10.817313Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488998838800775:2571] txid# 281474976715660, issues: { message: "Unexpected freeze state" severity: 1 } 128: Unexpected freeze state 
Error 128: Mix freeze cmd with other options is forbidden Error 128: Mix freeze cmd with other options is forbidden 2026-02-19T08:23:10.822229Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488998838800781:2576] txid# 281474976715661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } 2026-02-19T08:23:10.824540Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488998838800787:2581] txid# 281474976715662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } 2026-02-19T08:23:13.817769Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489010768121585:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:13.817926Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:13.840587Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.936395Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:13.941223Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489010768121540:2081] 1771489393809151 != 1771489393809154 2026-02-19T08:23:13.975479Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:13.975555Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:13.985183Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:14.016676Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:14.244207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:14.258731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:23:14.278762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:23:14.324522Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain 
[GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |93.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:23:16.478547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:23:16.478645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:16.478709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:23:16.478751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:23:16.478806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:23:16.478842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:23:16.478871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:23:16.478895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:23:16.479003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:16.479078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:23:16.480050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:16.480384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:23:16.706023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 
2026-02-19T08:23:16.706109Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:16.735811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:23:16.736629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:23:16.736849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:23:16.754600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:23:16.755748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:23:16.756518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:16.756880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:23:16.760768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:16.760986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:23:16.762396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:16.762484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:16.762601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:23:16.762650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:16.762693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:23:16.762858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:23:17.031260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.032993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.033063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.033143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.033233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:17.033312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
T_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:23:20.117462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:23:20.117951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:23:20.118012Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:23:20.118122Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:23:20.118159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:23:20.118203Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:23:20.118237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:23:20.118281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:23:20.118324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:23:20.118361Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:23:20.118395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:23:20.118476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:23:20.118526Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:23:20.118562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-02-19T08:23:20.118591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-02-19T08:23:20.119230Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:23:20.119336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:23:20.119379Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:23:20.119417Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-02-19T08:23:20.119460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:23:20.120292Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:23:20.120375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:23:20.120417Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:23:20.120446Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:23:20.120478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:23:20.120551Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:23:20.123976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:23:20.125773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:23:20.126023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:23:20.126066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:23:20.126445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:23:20.126557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:23:20.126598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:708:2696] TestWaitNotification: OK eventTxId 101 2026-02-19T08:23:20.127040Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:23:20.127276Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/UniqueName" took 259us result status StatusSuccess 2026-02-19T08:23:20.127642Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-02-19T08:23:20.130614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "UniqueName" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:23:20.130822Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 102:0, path# /MyRoot/UniqueName 2026-02-19T08:23:20.131000Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, at schemeshard: 72057594046678944 2026-02-19T08:23:20.133734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 
72057594046678944 2026-02-19T08:23:20.133979Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, operation: ALTER STREAMING QUERY, path: UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:23:20.134284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:23:20.134352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:23:20.134742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:23:20.134842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:23:20.134881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:716:2704] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2026-02-19T08:23:08.139469Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488986810707902:2166];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:08.139755Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:08.172354Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:08.237748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:08.963439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:09.030646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:09.030740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:09.156779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:09.188600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:09.210375Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:09.217837Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for 
subscription [1:7608488986810707773:2081] 1771489388080993 != 1771489388080996 2026-02-19T08:23:09.384129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:09.648424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:09.967413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:23:09.967635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-02-19T08:23:09.967746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-02-19T08:23:09.967783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2026-02-19T08:23:09.967799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976710658:0 type: TxMkDir target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2026-02-19T08:23:09.967840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:23:09.968015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-02-19T08:23:09.968074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:23:09.978501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 38, at schemeshard: 72057594046644480 2026-02-19T08:23:09.978692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2026-02-19T08:23:09.978866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:23:09.978887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 
2026-02-19T08:23:09.979048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:23:09.979137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:23:09.979153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608488991105675744:2371], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2026-02-19T08:23:09.979168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608488991105675744:2371], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 38 2026-02-19T08:23:09.979209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-02-19T08:23:09.979246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:65: MkDir::TPropose operationId# 281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:23:09.979280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2026-02-19T08:23:09.984623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:23:09.986290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-02-19T08:23:09.986389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-02-19T08:23:09.986401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2026-02-19T08:23:09.986422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2026-02-19T08:23:09.986440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-02-19T08:23:09.986611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-02-19T08:23:09.986651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 
Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-02-19T08:23:09.986672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2026-02-19T08:23:09.986685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 2 2026-02-19T08:23:09.986695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:23:09.986732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2026-02-19T08:23:09.994716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2026-02-19T08:23:09.994837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:23:09.994913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2026-02-19T08:23:09.994938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2026-02-19T08:23:10.000587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:10.000609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2026-02-19T08:23:10.000627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:10.001291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489390041, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:23:10.001423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771489390041 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 720575 ... 
is registered, txId: 281474976710661, at schemeshard: 72057594046644480 2026-02-19T08:23:10.446856Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{223, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:23:10.446890Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:23:10.448425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-02-19T08:23:10.448475Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2026-02-19T08:23:10.448510Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:23:10.448562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-02-19T08:23:10.448577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2026-02-19T08:23:10.448593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 12 2026-02-19T08:23:10.448622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 5 2026-02-19T08:23:10.448711Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{223, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2026-02-19T08:23:10.448738Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:23:10.448825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-02-19T08:23:10.448856Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2026-02-19T08:23:10.448871Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:23:10.448908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 
Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-02-19T08:23:10.448915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2026-02-19T08:23:10.448929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], version: 3 2026-02-19T08:23:10.448941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-02-19T08:23:10.448976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2026-02-19T08:23:10.448989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7608488995400643720:2296] 2026-02-19T08:23:10.449050Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{224, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2026-02-19T08:23:10.449071Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:23:10.449811Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:20:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:23:10.449835Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:20:1:24576:109:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:23:10.450216Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:21} commited cookie 1 for step 20 2026-02-19T08:23:10.451642Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:23:10.451687Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:133:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:23:10.451814Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2026-02-19T08:23:10.452840Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:23:10.452884Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:118:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:23:10.452918Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:23:10.452933Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:123:0] Status# 
OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-02-19T08:23:10.452990Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2026-02-19T08:23:10.453011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2026-02-19T08:23:10.453060Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2026-02-19T08:23:10.453073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2026-02-19T08:23:10.453179Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:406: TClient[72057594046644480] received poison pill [1:7608488995400643721:2296] 2026-02-19T08:23:10.453202Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594046644480] notify reset [1:7608488995400643721:2296] 2026-02-19T08:23:10.453286Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:182: [72057594046644480] Got PeerClosed from# [1:7608488995400643721:2296] 2026-02-19T08:23:13.402886Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:13.403086Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489011886996599:2272];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:13.403159Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:13.565904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.568940Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:13.569009Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:13.577899Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489011886996343:2081] 1771489393340889 != 1771489393340892 2026-02-19T08:23:13.610201Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:13.624423Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:13.826887Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.833821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:13.857394Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:14.388240Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:18.405794Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608489011886996599:2272];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:18.405863Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:18.657869Z node 2 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [2:7608489033361834180:2806] txid# 281474976710700 FailProposedRequest: Transaction incoming read set size 1000088 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2026-02-19T08:23:18.657953Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7608489033361834180:2806] txid# 281474976710700 RESPONSE Status# ExecError marker# P13c >> TFlatTest::CopyTableAndDropCopy [GOOD] |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |93.2%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> TxUsage::WriteToTopic_Demo_11_Table [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] Test command err: 2026-02-19T08:21:49.916863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488649299160755:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:50.060793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:50.120422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:50.598250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:50.598349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:50.639610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:50.657849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:50.665086Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488649299160710:2081] 
1771489309910285 != 1771489309910288 2026-02-19T08:21:50.679297Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:50.866846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:51.062661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:51.062695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:51.062704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:51.062833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:51.070608Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:51.668980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:54.486763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488670773998069:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.486865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.487244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488670773998096:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.487318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488670773998098:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.487442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:54.492530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:54.506187Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488670773998100:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:21:54.623583Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488670773998151:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:54.924136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488649299160755:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:54.924233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:54.969599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=528;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=528;columns=4; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; 2026-02-19T08:21:56.784309Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488677995174967:2264];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:56.784357Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:56.821414Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:57.109816Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:57.141882Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488677995174731:2081] 1771489316775510 != 1771489316775513 2026-02-19T08:21:57.149266Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:57.319848Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:57.319942Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:57.361802Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:57.376941Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:57.833827Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:57.847763Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:57.918357Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:57.918377Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:57.918385Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:57.918467Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:58.497323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:58.514255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:22:01.769004Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608488699470012090:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not ... cutions 2026-02-19T08:23:03.410054Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:03.605425Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7608488966004864803:2081] 1771489383286106 != 1771489383286109 2026-02-19T08:23:03.737821Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:03.745255Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:03.745400Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:03.746799Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:03.754105Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:03.962361Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:03.962389Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:03.962407Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:03.962517Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:04.088083Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:04.325915Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:05.067560Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:05.080644Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:10.389676Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608488996069636773:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:10.389848Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:10.390243Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608488996069636794:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:10.390338Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608488996069636795:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:10.390522Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:10.396849Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:23:10.411618Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7608488996069636798:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:23:10.474483Z node 12 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [12:7608488996069636851:2549] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; 2026-02-19T08:23:12.334442Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7608489007494204274:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:12.334518Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:12.365376Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:12.514640Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:12.514787Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:12.520084Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:12.523912Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7608489007494204232:2081] 1771489392333075 != 1771489392333078 2026-02-19T08:23:12.576056Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:12.578598Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:12.686698Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:12.686730Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:12.686760Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:12.686919Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:13.038591Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:13.349595Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:13.617203Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:13.634433Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:17.342201Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7608489007494204274:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:17.342316Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:18.818746Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489033264008898:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:18.818828Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489033264008908:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:18.818937Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:18.825881Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489033264008928:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:18.826057Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:18.826652Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:23:18.845999Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7608489033264008927:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:23:18.958826Z node 13 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [13:7608489033264008980:2544] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; >> TFlatTest::WriteSplitWriteSplit [GOOD] >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestRemoveTenant >> TOlap::CreateDropTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoKind >> TxUsage::WriteToTopic_Demo_11_Query |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |93.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> TCdcStreamTests::Basic >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAutoSplit >> TCdcStreamTests::VirtualTimestamps >> THealthCheckTest::DatabaseDoesNotExist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2026-02-19T08:23:00.092660Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488956482768746:2135];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:00.092704Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:00.242182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:00.313811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:00.812093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:00.812175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:00.822875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:00.914930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:01.078092Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:01.081999Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488956482768651:2081] 1771489380048859 != 1771489380048862 2026-02-19T08:23:01.098187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:01.181863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:01.201922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:01.665284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:01.687229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:01.753693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:02.149917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:02.323664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:02.452275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:02.600182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:02.756210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:02.921864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:03.093760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:03.244642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:03.385956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:03.479382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:03.582734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:03.728465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:03.900454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:04.056980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:04.256799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:04.454229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:04.627218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:04.752145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:05.009549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:05.094969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488956482768746:2135];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:05.095028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:05.146230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:05.323531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:05.706685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... 
thId [OwnerId: 72057594046644480, LocalPathId: 42] was 4 2026-02-19T08:23:18.665176Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976710686 datashard 72075186224037895 state Ready 2026-02-19T08:23:18.665211Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2026-02-19T08:23:18.665306Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976710686 datashard 72075186224037894 state Ready 2026-02-19T08:23:18.665323Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-02-19T08:23:18.674851Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037894, clientId# [2:7608489030664646668:3210], serverId# [2:7608489030664646669:3211], sessionId# [0:0:0] 2026-02-19T08:23:18.674975Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:23:18.676771Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:23:18.676831Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:23:18.687191Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037895, clientId# [2:7608489030664646678:3217], serverId# [2:7608489030664646679:3218], sessionId# [0:0:0] 2026-02-19T08:23:18.687314Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-02-19T08:23:18.688869Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-02-19T08:23:18.688936Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-02-19T08:23:18.692873Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:23:18.694270Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:23:18.694318Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:23:18.697371Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-02-19T08:23:18.703444Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-02-19T08:23:18.703533Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-02-19T08:23:18.708752Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:23:18.711011Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:23:18.711069Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:23:18.714589Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put 
Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:23:18.716297Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:23:18.716342Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2026-02-19T08:23:18.716654Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-02-19T08:23:18.717793Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-02-19T08:23:18.717841Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-02-19T08:23:18.719406Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:23:18.722123Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:23:18.722172Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2026-02-19T08:23:18.730032Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:23:18.734793Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:23:18.734864Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:23:18.741338Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-02-19T08:23:18.742645Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-02-19T08:23:18.742698Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-02-19T08:23:18.745826Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:23:18.754117Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:23:18.754204Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:23:18.757784Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-02-19T08:23:18.759160Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-02-19T08:23:18.759201Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-02-19T08:23:18.762432Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:23:18.763798Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:23:18.763838Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:23:18.766226Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:23:18.766656Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:23:18.766671Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2026-02-19T08:23:18.769361Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-02-19T08:23:18.782623Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-02-19T08:23:18.782720Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-02-19T08:23:18.789500Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-02-19T08:23:18.796651Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:23:18.796683Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2026-02-19T08:23:18.796918Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:23:18.810430Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:23:18.810523Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:23:18.814523Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-02-19T08:23:18.822219Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-02-19T08:23:18.822303Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-02-19T08:23:18.829921Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-02-19T08:23:18.835746Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-02-19T08:23:18.835820Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-02-19T08:23:18.845112Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-02-19T08:23:18.854201Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 
2026-02-19T08:23:18.854289Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2026-02-19T08:23:18.855688Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2026-02-19T08:23:18.856173Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2026-02-19T08:23:18.856633Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-02-19T08:23:18.857080Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-02-19T08:23:18.857503Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-02-19T08:23:18.858012Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2026-02-19T08:23:13.449954Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489012153884183:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:13.450483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:13.793785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.815697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:13.815808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:13.902735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:13.921619Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:13.957266Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489012153884144:2081] 1771489393420335 != 1771489393420338 2026-02-19T08:23:14.044259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:14.205279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:14.222182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:14.349236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:14.451928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:14.579046Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:23:14.592952Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.011s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:23:14.631897Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.011s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:23:14.636708Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-02-19T08:23:14.694916Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-02-19T08:23:14.889778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:23:14.890056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2026-02-19T08:23:14.890250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:23:14.890269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-02-19T08:23:14.890289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-02-19T08:23:14.890541Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-02-19T08:23:14.890558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976710680:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-02-19T08:23:14.890798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2026-02-19T08:23:14.890832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:23:14.891703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:23:14.891826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2026-02-19T08:23:14.891971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2026-02-19T08:23:14.892031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-02-19T08:23:14.892315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:23:14.892411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { 
StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:23:14.892513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:23:14.893079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-02-19T08:23:14.893157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2026-02-19T08:23:14.893208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2026-02-19T08:23:14.893277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2026-02-19T08:23:14.893295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2026-02-19T08:23:14.893304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2026-02-19T08:23:14.893336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 2026-02-19T08:23:14.902802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710680, at schemeshard: 72057594046644480 2026-02-19T08:23:14.902834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710680, ready parts: 0/1, is published: true 2026-02-19T08:23:14.902849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710680, at schemeshard: 72057594046644480 2026-02-19T08:23 ... 
4037968897 at ss 72057594046644480 2026-02-19T08:23:19.759647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:23:19.759657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:23:19.759690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:23:19.759701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:23:19.759734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:23:19.759743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:23:19.759774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:23:19.759783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:23:19.760560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608489034780760417 RawX2: 4503608217307378 } TabletId: 72075186224037888 State: 4 2026-02-19T08:23:19.760597Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:23:19.760729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608489034780760765 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2026-02-19T08:23:19.760746Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:23:19.760954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:23:19.760969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:23:19.761054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:23:19.761341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 7 2026-02-19T08:23:19.761512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 
ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:23:19.761630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-02-19T08:23:19.761777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-02-19T08:23:19.761902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-02-19T08:23:19.762050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-02-19T08:23:19.762191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-02-19T08:23:19.762289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:23:19.762391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-02-19T08:23:19.762500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-02-19T08:23:19.762624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:23:19.762637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:23:19.762673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:23:19.762686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:23:19.762708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:23:19.762725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:7 2026-02-19T08:23:19.762734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-02-19T08:23:19.762751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:7 2026-02-19T08:23:19.762767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:5 2026-02-19T08:23:19.762776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 
72057594046644480:5 tabletId 72075186224037892 2026-02-19T08:23:19.762795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:2 2026-02-19T08:23:19.762803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:23:19.763097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:5 2026-02-19T08:23:19.764297Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-02-19T08:23:19.764324Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2026-02-19T08:23:19.764337Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-02-19T08:23:19.764352Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-02-19T08:23:19.770567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-02-19T08:23:19.770810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:23:19.771006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-02-19T08:23:19.771132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-02-19T08:23:19.771246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:23:19.771393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:23:19.771512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:23:19.771524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-02-19T08:23:19.771567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:23:19.772816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:6 2026-02-19T08:23:19.772833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-02-19T08:23:19.772870Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:1 2026-02-19T08:23:19.772880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-02-19T08:23:19.772897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:23:19.772910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:23:19.772936Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:23:19.777047Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2026-02-19T08:23:19.777079Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-02-19T08:23:19.777097Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |93.2%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |93.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::ReadWithRestarts >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::DropMultipleStreams >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> 
TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |93.2%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] Test command err: 2026-02-19T08:21:52.202870Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488663505312913:2200];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:52.205190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:52.470616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:52.470710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:52.558188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:52.593847Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488663505312739:2081] 1771489312176243 != 1771489312176246 2026-02-19T08:21:52.620651Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:52.621952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:52.876379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:53.022304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:53.022328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:53.022335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:53.022426Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:53.217903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:54.068640Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:54.119642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.268698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.441623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.514498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.289814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488663505312913:2200];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:57.290127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:57.423569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488684980151117:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.423679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.424132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488684980151127:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.424179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.008558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.059985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.088509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.117009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.145434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.186252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.228645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.279028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.448648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488689275119297:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.448734Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.449153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488689275119302:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.449163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488689275119303:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.449197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.454248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:58.473734Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488689275119306:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:58.554993Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488689275119357:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_co ... 608488987003708400:2551] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=336;columns=1; 2026-02-19T08:23:10.273688Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7608488999183533557:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:10.275267Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:10.298695Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:10.473731Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:10.477866Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:10.529862Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7608488999183533527:2081] 1771489390270320 != 1771489390270323 2026-02-19T08:23:10.561307Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:10.561434Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:10.567105Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:10.681150Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:10.681178Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:10.681191Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:10.681302Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:10.775951Z node 12 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:11.280688Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:11.872735Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:15.277885Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7608488999183533557:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:15.278028Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:16.403686Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608489024953338204:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:16.403854Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:16.404485Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608489024953338217:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:16.404547Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608489024953338218:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:16.404756Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:16.415606Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:23:16.441254Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7608489024953338221:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:23:16.518971Z node 12 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [12:7608489024953338273:2545] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=344;columns=1; 2026-02-19T08:23:18.474807Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:18.474949Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:18.661666Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:18.669983Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7608489031329869903:2081] 1771489398321611 != 1771489398321614 2026-02-19T08:23:18.704379Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:18.704505Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:18.719744Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:18.771344Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:18.927315Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:18.927345Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:18.927360Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:18.927479Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:19.441906Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:19.815168Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:25.379693Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489061394641855:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:25.379694Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489061394641889:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:25.379853Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:25.380417Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489061394641893:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:25.380549Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:25.387031Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:23:25.412497Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7608489061394641892:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:23:25.501773Z node 13 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [13:7608489061394641945:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=120;columns=1; >> TLocksFatTest::ShardLocks [GOOD] >> THealthCheckTest::DatabaseDoesNotExist [GOOD] >> THealthCheckTest::BridgeGroupNoIssues >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> TCdcStreamTests::DropMultipleStreams [GOOD] >> TCdcStreamTests::Attributes >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::SchemaChanges >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2026-02-19T08:23:06.994874Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488980056693605:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:06.995067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:07.472148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:07.511727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:07.511823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:07.565875Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488980056693577:2081] 1771489386991748 != 1771489386991751 2026-02-19T08:23:07.571076Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:07.585651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:07.681799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:07.908809Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:07.944180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:08.026825Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:08.032495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:08.248862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:08.320198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:11.993375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488980056693605:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:11.993445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:13.146112Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.146284Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:13.284871Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:13.289919Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489012142440999:2081] 1771489393014457 != 1771489393014460 2026-02-19T08:23:13.337641Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:13.337959Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:13.350669Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:13.409808Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.554639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:13.561168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:13.572804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:13.645077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:13.696875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:14.094494Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:18.952245Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608489030268807999:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:18.952301Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:19.004104Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:19.117588Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:19.118922Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608489030268807969:2081] 1771489398949427 != 1771489398949430 2026-02-19T08:23:19.132206Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:19.132291Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:19.132555Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-02-19T08:23:19.155990Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:19.324055Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:19.473198Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:19.479506Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:19.495538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:19.587058Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:19.699943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:19.988963Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:24.684311Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608489058265244009:2151];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:24.684395Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:24.733873Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:24.892932Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:24.898029Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608489058265243896:2081] 1771489404679976 != 1771489404679979 2026-02-19T08:23:24.920461Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:24.920563Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:24.973804Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:24.977137Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:25.230218Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:25.238960Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:25.258200Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:25.372679Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:25.427794Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:25.529703Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:25.696443Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> KqpScan::ScanRetryRead >> TCancelTx::ImmediateReadOnly [GOOD] >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestGetUnknownTenantStatus >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> Describe::DescribePartitionPermissions [GOOD] >> DirectReadWithServer::KillPQTablet >> TCdcStreamTests::SchemaChanges [GOOD] >> TCdcStreamTests::RetentionPeriod 
|93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |93.2%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleConfigTests::TestAffectedConfigs >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf >> KqpScan::RemoteShardScan >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::PostgreSQL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2026-02-19T08:23:11.552722Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489003615222438:2189];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:11.553024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:12.169898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:12.226398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:12.226484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:12.312099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:12.337671Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489003615222286:2081] 1771489391484411 != 1771489391484414 2026-02-19T08:23:12.345514Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:12.458899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:12.538158Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:12.583349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:12.589431Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:12.638485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:12.976639Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7608489007910190555:2587] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-02-19T08:23:12.976781Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608489007910190555:2587] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:12.990348Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7608489007910190567:2596] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-02-19T08:23:12.990427Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608489007910190567:2596] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:13.004434Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7608489007910190581:2607] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-02-19T08:23:13.004495Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608489007910190581:2607] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:13.043029Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7608489012205157903:2627] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-02-19T08:23:13.043110Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608489012205157903:2627] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:13.066847Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7608489012205157916:2637] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-02-19T08:23:13.066927Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608489012205157916:2637] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:13.077350Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7608489012205157929:2647] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-02-19T08:23:13.077421Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608489012205157929:2647] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:16.389879Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:16.390012Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:16.753920Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:16.773820Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489024115241700:2081] 1771489396215683 != 1771489396215686 2026-02-19T08:23:16.820942Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:16.829971Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:16.831651Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:16.853842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:17.149251Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:17.252698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:17.259036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:17.262276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:17.339286Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:21.823191Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608489046355042246:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:21.823241Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:21.957950Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:22.137984Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:22.142043Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608489046355042128:2081] 1771489401799045 != 1771489401799048 2026-02-19T08:23:22.162012Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-02-19T08:23:22.203092Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:22.203173Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:22.263783Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:22.666918Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:22.675876Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:22.676228Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:22.679769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:22.798397Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:23.190863Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7608489054944977685:2585] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-02-19T08:23:23.190925Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7608489054944977685:2585] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:23.221885Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7608489054944977701:2598] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-02-19T08:23:23.221945Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7608489054944977701:2598] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:23.265400Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7608489054944977715:2609] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-02-19T08:23:23.265465Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7608489054944977715:2609] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:23.305724Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7608489054944977742:2630] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-02-19T08:23:23.305801Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7608489054944977742:2630] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:23.321624Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7608489054944977755:2640] txid# 
281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-02-19T08:23:23.321732Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7608489054944977755:2640] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:23.359767Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7608489054944977769:2651] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-02-19T08:23:23.359839Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7608489054944977769:2651] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:26.833536Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608489065192744486:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:26.834605Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:26.874848Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:26.964117Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:26.964194Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:26.968349Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:26.974663Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608489065192744452:2081] 1771489406831506 != 1771489406831509 2026-02-19T08:23:26.999537Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:27.021344Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:27.226529Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:27.336794Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:27.345351Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:27.348946Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:27.653222Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2026-02-19T08:23:27.654898Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7608489069487712700:2584] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:27.667953Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2026-02-19T08:23:27.668533Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7608489069487712714:2592] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2026-02-19T08:23:27.843721Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |93.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2026-02-19T08:23:34.553512Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:178: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2026-02-19T08:23:34.997001Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:178: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 9911676571668201898 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink >> TConsoleConfigTests::TestAffectedConfigs [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |93.2%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2026-02-19T08:22:54.181853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:54.181937Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:54.451808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:55.997833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:55.997910Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:56.077984Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:57.424628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:57.424698Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:57.511853Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:59.295597Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:59.295662Z node 4 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:59.407367Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:00.742063Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:23:00.742148Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:00.838225Z node 4 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2026-02-19T08:23:01.660903Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:01.660977Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:01.769989Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:02.958437Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:23:02.958519Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:03.045174Z node 5 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[5:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2026-02-19T08:23:04.026238Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:04.026314Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:04.147386Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:08.704487Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:08.704567Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:08.811412Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:12.855332Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:12.855414Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:12.962826Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:14.430350Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:14.430436Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:14.599399Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:16.401651Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:16.401738Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:16.553724Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:18.248709Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:18.248799Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:18.375276Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:19.865323Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:19.865415Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:19.971360Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:21.872379Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:21.872476Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:21.987219Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:23.851402Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:23.851490Z node 14 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:23.963314Z node 14 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:25.369833Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:25.369920Z node 15 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:25.464950Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:26.875218Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:26.875285Z node 16 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:27.017160Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:28.764921Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:28.765020Z node 17 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:28.880211Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:30.501180Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:30.501270Z node 18 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:30.615163Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:33.043254Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:33.043355Z node 19 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:33.256625Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:35.198525Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:35.198619Z node 20 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 
2026-02-19T08:23:35.307431Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> TLocksFatTest::RangeSetNotBreak [GOOD] >> TxUsage::WriteToTopic_Demo_42_Table >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> TGRpcStreamingTest::WriteAndFinishWorks >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TFlatTest::RejectByPerRequestSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:23:18.104758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:23:18.104851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:18.104914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:23:18.104972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:23:18.105020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:23:18.105054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:23:18.105098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:23:18.105125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:23:18.105185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:18.105254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:23:18.106217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-02-19T08:23:18.106509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:23:18.203971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:18.204054Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:18.219709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:23:18.222097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:23:18.222340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:23:18.234585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:23:18.235325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:23:18.236097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:18.236482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:23:18.240771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:18.240985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:23:18.242339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:18.242413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:18.242547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:23:18.242599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:18.242641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:23:18.242816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:23:18.408026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409802Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.409942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.410029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:18.410097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2026-02-19T08:23:35.890362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:23:35.890394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-02-19T08:23:35.890585Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:23:35.890626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:23:35.890742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:23:35.890767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:23:35.890825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:23 2026-02-19T08:23:35.890849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2026-02-19T08:23:35.890938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:21 2026-02-19T08:23:35.890962Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2026-02-19T08:23:35.896049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:19 2026-02-19T08:23:35.896102Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2026-02-19T08:23:35.897190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:17 2026-02-19T08:23:35.897228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2026-02-19T08:23:35.897326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:15 2026-02-19T08:23:35.897350Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2026-02-19T08:23:35.897428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:13 2026-02-19T08:23:35.897451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2026-02-19T08:23:35.897510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:11 2026-02-19T08:23:35.897532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2026-02-19T08:23:35.897593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:9 2026-02-19T08:23:35.897616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2026-02-19T08:23:35.897691Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:36 2026-02-19T08:23:35.897730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2026-02-19T08:23:35.897808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:38 2026-02-19T08:23:35.897831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2026-02-19T08:23:35.898836Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:40 2026-02-19T08:23:35.898869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2026-02-19T08:23:35.898952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:32 2026-02-19T08:23:35.898975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2026-02-19T08:23:35.899029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:34 2026-02-19T08:23:35.899050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2026-02-19T08:23:35.899111Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:28 2026-02-19T08:23:35.899135Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2026-02-19T08:23:35.899193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:30 2026-02-19T08:23:35.899216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2026-02-19T08:23:35.899274Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:26 2026-02-19T08:23:35.899297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2026-02-19T08:23:35.899355Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:24 2026-02-19T08:23:35.899379Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2026-02-19T08:23:35.899415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:57 2026-02-19T08:23:35.899439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2026-02-19T08:23:35.904516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:53 
2026-02-19T08:23:35.904570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2026-02-19T08:23:35.904652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:55 2026-02-19T08:23:35.904675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2026-02-19T08:23:35.904736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:49 2026-02-19T08:23:35.904761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2026-02-19T08:23:35.904815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:47 2026-02-19T08:23:35.904837Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2026-02-19T08:23:35.904897Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:51 2026-02-19T08:23:35.904919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2026-02-19T08:23:35.904981Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:45 2026-02-19T08:23:35.905004Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2026-02-19T08:23:35.905061Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:43 2026-02-19T08:23:35.905081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2026-02-19T08:23:35.905143Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:41 2026-02-19T08:23:35.905192Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2026-02-19T08:23:35.905339Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 109 2026-02-19T08:23:35.906957Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:23:35.907207Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 278us result status StatusPathDoesNotExist 2026-02-19T08:23:35.907399Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t 
been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:23:35.908130Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 42 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2026-02-19T08:23:35.908222Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 42 took 101us result status StatusPathDoesNotExist 2026-02-19T08:23:35.908296Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::DropTableTwice >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] Test command err: 2026-02-19T08:22:58.086811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:58.086885Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:58.334229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:00.044142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:00.044199Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:00.273415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:01.838463Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:01.838527Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:01.937488Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:03.302539Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:03.302608Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:03.395095Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:04.674968Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:04.675041Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:04.771041Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:06.289812Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:06.289881Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:06.390470Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:07.892932Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:07.893020Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:08.065385Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:12.462216Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:12.462302Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:12.567230Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:17.693295Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:17.693395Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:17.827233Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:19.520750Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:19.520844Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:19.723557Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:20.541420Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 11 2026-02-19T08:23:20.542301Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 11 2026-02-19T08:23:20.542394Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 11 2026-02-19T08:23:20.542622Z node 11 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [11:465:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-02-19T08:23:22.178900Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:22.179007Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:22.339615Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:24.448793Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:24.448880Z node 14 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:24.570596Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:26.430788Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:26.430866Z node 16 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:26.552344Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:28.474631Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:28.474723Z node 18 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:28.716978Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:31.363303Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:31.363399Z node 20 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:31.467585Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:33.366601Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:33.366693Z node 21 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:33.494020Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:36.953647Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:36.953776Z node 23 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:37.131730Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |93.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> KqpExplain::ComplexJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2026-02-19T08:23:15.294255Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489020055445059:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:15.294563Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:15.721854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:15.728066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:15.728156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:15.892233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:15.927376Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:15.933418Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489020055444921:2081] 1771489395268345 != 1771489395268348 2026-02-19T08:23:15.990197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:16.302286Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:16.493382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:16.504285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:16.518102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:16.696962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:16.880136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:16.949549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:20.292386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608489020055445059:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:20.292523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:26.651137Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489067418734068:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:26.662467Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:26.676953Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:26.757127Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:26.791047Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:26.791128Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:26.802209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:26.981604Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:26.989277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:26.998896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:27.012158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:27.093783Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:27.206349Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:27.669856Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:31.651850Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608489067418734068:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:31.651914Z node 2 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2026-02-19T08:23:10.002660Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488991840495328:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:10.004385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:10.036975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:10.329355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:10.338366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:10.338454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:10.471225Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488991840495279:2081] 1771489389998613 != 1771489389998616 2026-02-19T08:23:10.483585Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:10.488110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:10.578511Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:10.770247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:10.791844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:10.854062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:11.015924Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:15.001949Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488991840495328:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:15.002002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:20.366340Z node 1 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002517 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2026-02-19T08:23:20.366461Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002517 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2026-02-19T08:23:20.369846Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608489039085137424:3133] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2026-02-19T08:23:21.494752Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489046680206766:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:21.494806Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:21.532650Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:21.717999Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489046680206738:2081] 1771489401441095 != 1771489401441098 2026-02-19T08:23:21.723102Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:21.726103Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:21.741593Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:21.745982Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:21.785979Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:21.892704Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:22.102975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:22.118264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at 
schemeshard: 72057594046644480 2026-02-19T08:23:22.146388Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:22.152243Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:22.522061Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:26.494948Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608489046680206766:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:26.495552Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:31.710185Z node 2 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002405 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2026-02-19T08:23:31.710339Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002405 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2026-02-19T08:23:31.710911Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7608489089629881590:3137] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2026-02-19T08:23:32.904391Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:32.904562Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:33.018549Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:33.024338Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608489091635188417:2081] 1771489412779859 != 1771489412779862 2026-02-19T08:23:33.050770Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:33.050841Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:33.091022Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:33.219998Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:33.346972Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:33.354909Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:23:33.368203Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:23:33.376146Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:33.769991Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:37.589469Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7608489091635188574:2098] Handle TEvProposeTransaction 2026-02-19T08:23:37.589522Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7608489091635188574:2098] TxId# 281474976715700 ProcessProposeTransaction 2026-02-19T08:23:37.589575Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [3:7608489091635188574:2098] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7608489113110026218:2807] DataReq marker# P0 2026-02-19T08:23:37.589655Z node 3 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [3:7608489113110026218:2807] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2026-02-19T08:23:37.590415Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7608489113110026218:2807] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2026-02-19T08:23:37.590443Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7608489113110026218:2807] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2026-02-19T08:23:37.590480Z node 3 :TX_PROXY DEBUG: datareq.cpp:1453: Actor# [3:7608489113110026218:2807] txid# 281474976715700 SEND to# [3:7608489091635188620:2108] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2026-02-19T08:23:37.590624Z node 3 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [3:7608489113110026218:2807] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2026-02-19T08:23:37.592289Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7608489113110026218:2807] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2026-02-19T08:23:37.592622Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7608489113110026218:2807] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2026-02-19T08:23:37.592955Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:23:37.594349Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2026-02-19T08:23:37.594701Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:23:37.595805Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2026-02-19T08:23:37.596957Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:23:37.597060Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:23:37.597232Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7608489113110026218:2807] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000887 out readset size 0 marker# P6 2026-02-19T08:23:37.597276Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7608489113110026218:2807] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000479 out readset size 0 marker# P6 2026-02-19T08:23:37.597343Z node 3 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [3:7608489113110026218:2807] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001366 exceeded limit 10000 Status# ExecError 2026-02-19T08:23:37.597422Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7608489113110026218:2807] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2026-02-19T08:23:37.597591Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2026-02-19T08:23:37.597640Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2026-02-19T08:23:37.598148Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2026-02-19T08:23:37.598183Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> 
TxUsage::WriteToTopic_Demo_18_RestartNo_Query [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestListTenants >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-02-19T08:23:39.268667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:23:39.268775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:39.268843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:23:39.268888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:23:39.268932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:23:39.268978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:23:39.269015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:23:39.269049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:23:39.269122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:39.269203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:23:39.271075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:39.271498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:23:39.474675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:23:39.474769Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:39.475738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:39.509727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:23:39.510040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:23:39.510250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:23:39.533700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:23:39.534216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:23:39.537794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:39.539003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:23:39.544043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:39.544294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:23:39.545641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:39.545729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:39.545924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:23:39.545992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:39.546045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:23:39.546357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:23:39.737528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.738709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.738913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.738996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour 
} } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.739758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.740095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
tionByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-02-19T08:23:41.014699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:41.014842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:23:41.014902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-02-19T08:23:41.015023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:23:41.015120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:23:41.015330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:23:41.015423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:23:41.015874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:23:41.016079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:23:41.017759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:41.017828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:41.018005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:23:41.018172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:41.018209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2212], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-02-19T08:23:41.018249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-02-19T08:23:41.018485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:23:41.018529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:23:41.018641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:23:41.018689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:23:41.018754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:23:41.018794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:23:41.018836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-02-19T08:23:41.018876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:23:41.018913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:23:41.018947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:23:41.019023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:23:41.019069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-02-19T08:23:41.019113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:23:41.019142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:23:41.019673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:23:41.019760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:23:41.019811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:23:41.019874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:23:41.019917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:23:41.020174Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:23:41.020220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:23:41.020289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:23:41.020745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:23:41.020845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:23:41.020877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:23:41.020909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:23:41.020940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:23:41.021007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-02-19T08:23:41.024950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:23:41.025103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:23:41.025185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:23:41.025576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:23:41.025640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:23:41.026130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:23:41.026260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:23:41.026299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:793:2782] TestWaitNotification: OK eventTxId 104 2026-02-19T08:23:41.026913Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:23:41.027115Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 264us result status StatusPathDoesNotExist 2026-02-19T08:23:41.027266Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ComplexJoin [GOOD] Test command err: 2026-02-19T08:19:54.184756Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488154637096950:2271];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:54.184830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:54.876435Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:54.935193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:54.935322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:54.973153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:55.058095Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:55.105798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:55.221127Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:55.373454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:55.373482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:55.373489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-02-19T08:19:55.373590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:55.530673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:56.015774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:19:56.123079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:56.123138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:56.127561Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:19:56.143653Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:19:56.146560Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:19:56.153727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:56.323186Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:56.323269Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:56.325848Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:19:56.326033Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:19:56.326102Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:19:56.326256Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:19:56.326310Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:19:56.326365Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:19:56.326418Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:19:56.326509Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:19:56.326622Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2026-02-19T08:19:56.330642Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:56.576907Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:19:56.577006Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:19:56.581534Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:56.614982Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:19:56.615042Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:19:56.615251Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:19:56.615315Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:19:56.615343Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:19:56.615365Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:19:56.615393Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:19:56.615422Z node 2 :STATISTICS DEBUG: tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:19:56.617353Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:19:56.627219Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7608488163175665351:2237] 2026-02-19T08:19:56.627269Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:19:56.640206Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. Describe result: PathErrorUnknown 2026-02-19T08:19:56.640253Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. Creating table 2026-02-19T08:19:56.640337Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:19:56.649341Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:19:56.663086Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7608488163175665537:2356], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:19:56.667073Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:19:56.667123Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:7608488163175665541:2310], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:19:56.685247Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608488163175665509:2342] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-02-19T08:19:56.707151Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-02-19T08:19:56.707341Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7608488163175665589:2379] 2026-02-19T08:19:56.721440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7608488163175665589:2379], schemeshard id = 72075186224037897 2026-02-19T08:19:56.787217Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:19:56.791416Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7608488163175665667:2441], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:19:56.796643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:56.819113Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:19:56.819159Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. Subscribe on create table tx: 281474976720658 2026-02-19T08:19:56.848862Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. 
Subscribe on tx: 281474976720658 registered 2026-02-19T08:19:57.124567Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:57.487118Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:7608488163175665487:2303] Owner: [2:7608488163175665486:2302]. Request: create. Transactio ... ification cookie mismatch for subscription [8:7608489061262474251:2081] 1771489405688955 != 1771489405688958 2026-02-19T08:23:25.966662Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:25.966787Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:25.983960Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:26.005927Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:26.082527Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:26.082554Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:26.082565Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:26.082691Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:26.670582Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:26.884414Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:26.894357Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:23:26.907017Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:27.030326Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:27.346174Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:27.555231Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:30.694091Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608489061262474281:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:30.694186Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:31.603385Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608489087032279908:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:31.603491Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:31.606981Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608489087032279918:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:31.607109Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:31.753289Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:31.849142Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:31.987041Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:32.089272Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:32.185148Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:32.295120Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:32.386832Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:32.476950Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:32.612730Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608489091327248106:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:32.612836Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:32.613148Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608489091327248111:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:32.613209Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608489091327248112:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:32.613338Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:32.619756Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:23:32.640670Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608489091327248115:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:23:32.710493Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608489091327248168:3779] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:23:35.905727Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:36.382714Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:36.482863Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard >> THealthCheckTest::BridgeGroupNoIssues [GOOD] >> THealthCheckTest::BridgeTwoGroups >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:23:39.318172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:23:39.318283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:39.318395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:23:39.318451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:23:39.318496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:23:39.318542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:23:39.318574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:23:39.318622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:23:39.318702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:39.318769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:23:39.319718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:39.320116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:23:39.576775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:23:39.576896Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:39.577888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:39.619841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:23:39.620002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:23:39.620212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:23:39.650270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:23:39.656059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:23:39.657100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:39.657629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:23:39.669046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:39.669291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:23:39.671558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:39.671653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-02-19T08:23:39.672021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:23:39.672083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:39.672137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:23:39.672468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:23:40.018300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.019617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.019801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.019927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020202Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:40.020942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
57594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:23:41.077836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:23:41.078244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:41.078383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:23:41.078460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:23:41.078794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:23:41.078866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:23:41.079053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:23:41.079140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:23:41.081623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:41.081675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:41.081926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:41.081978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:23:41.082265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:23:41.082320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:23:41.082439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:23:41.082483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:23:41.082523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: 
Part operation is done id#1:0 progress is 1/1 2026-02-19T08:23:41.082571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:23:41.082620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:23:41.082701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:23:41.082746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:23:41.082807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:23:41.082893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:23:41.082932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:23:41.082967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:23:41.083734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:23:41.083875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:23:41.083928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:23:41.083989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:23:41.084044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:23:41.084146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:23:41.088009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:23:41.088609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:23:41.090021Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:672:2660] Bootstrap 2026-02-19T08:23:41.091205Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:672:2660] Become StateWork (SchemeCache [1:677:2665]) 2026-02-19T08:23:41.093418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:23:41.093828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2026-02-19T08:23:41.093925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-02-19T08:23:41.093971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-02-19T08:23:41.095000Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:672:2660] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:23:41.098713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:23:41.099009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2026-02-19T08:23:41.099583Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:23:41.099880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:23:41.099945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:23:41.100389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:23:41.100510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:23:41.100557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:687:2675] TestWaitNotification: OK eventTxId 101 2026-02-19T08:23:41.101046Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:23:41.101325Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 314us result status StatusPathDoesNotExist 2026-02-19T08:23:41.101536Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:23:38.893896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:23:38.894002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:38.894078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:23:38.894120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:23:38.894166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:23:38.894215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:23:38.894249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:23:38.894286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:23:38.894394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:38.894473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:23:38.895470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:38.895869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:23:39.050544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:23:39.050644Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:39.051644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:39.071196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:23:39.071316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:23:39.071539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:23:39.101372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:23:39.102222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:23:39.103150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:39.103609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:23:39.109478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:39.109733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:23:39.111934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:39.112012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:39.112331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:23:39.112390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:39.112442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:23:39.112816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:23:39.280681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.281869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.281986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:39.282957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:23:41.453869Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:23:41.454052Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 205us result status StatusSuccess 2026-02-19T08:23:41.454380Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 
MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:23:41.455166Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:23:41.455303Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 159us result status StatusSuccess 2026-02-19T08:23:41.455695Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:23:41.456226Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2026-02-19T08:23:41.456390Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 188us result status StatusSuccess 2026-02-19T08:23:41.456696Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:23:41.457204Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:23:41.457381Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 207us result status StatusSuccess 2026-02-19T08:23:41.457682Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2026-02-19T08:23:38.147326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489115939687324:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:38.148443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:38.460857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:38.486097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:38.486221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:38.560221Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:38.565884Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489115939687282:2081] 1771489418139382 != 1771489418139385 2026-02-19T08:23:38.576679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:38.670139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:38.766684Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d21a1370680] stream accepted Name# Session ok# true peer# ipv6:[::1]:41666 2026-02-19T08:23:38.768526Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d21a1370680] facade attach Name# Session actor# 
[1:7608489115939687964:2371] peer# ipv6:[::1]:41666 2026-02-19T08:23:38.768569Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d21a1370680] facade write Name# Session data# peer# ipv6:[::1]:41666 2026-02-19T08:23:38.770430Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:399: [0x7d21a1370680] facade write Name# Session data# peer# ipv6:[::1]:41666 grpc status# (0) message# 2026-02-19T08:23:38.774083Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d21a1370680] write finished Name# Session ok# true peer# ipv6:[::1]:41666 2026-02-19T08:23:38.774329Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2026-02-19T08:23:38.775791Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d21a1370680] stream done notification Name# Session ok# true peer# ipv6:[::1]:41666 2026-02-19T08:23:38.775853Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d21a1370680] write finished Name# Session ok# true peer# ipv6:[::1]:41666 2026-02-19T08:23:38.775928Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d21a1370680] stream finished Name# Session ok# true peer# ipv6:[::1]:41666 grpc status# (0) message# 2026-02-19T08:23:38.775939Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2026-02-19T08:23:38.776004Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d21a1370680] deregistering request Name# Session peer# ipv6:[::1]:41666 (finish done) >> BasicUsage::ReadWithRestarts [GOOD] >> BasicUsage::KeyedWriteSession_UserEventHandlers >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:23:41.554753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:23:41.554839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:41.554897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:23:41.554934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:23:41.554969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:23:41.555009Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:23:41.555041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:23:41.555071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:23:41.555156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:41.555229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:23:41.556054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:41.556340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:23:41.779216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:23:41.779297Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:41.780129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:41.807923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:23:41.808043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:23:41.808230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:23:41.819264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:23:41.820094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:23:41.820823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:41.821243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:23:41.825840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:41.826022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:23:41.827821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:41.827896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:41.828168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-02-19T08:23:41.828218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:41.828283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:23:41.828586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:23:41.980970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.981734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.981842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.981891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.981952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.981991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.982038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.982095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.982176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.982222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.982270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.982317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.982414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:41.982475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
d__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:23:42.784539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_data_source.cpp:34: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000039 2026-02-19T08:23:42.784700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> Done 2026-02-19T08:23:42.784866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:23:42.784941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:23:42.785772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:23:42.787960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:23:42.789146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:42.789177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:42.789388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:23:42.789495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:23:42.789611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:42.789649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:23:42.789691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:23:42.789732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:23:42.790017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:23:42.790066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:23:42.790177Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:23:42.790219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:23:42.790282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:23:42.790336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:23:42.790383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:23:42.790432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:23:42.790496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:23:42.790534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:23:42.790621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:23:42.790661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:23:42.790701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:23:42.790757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:23:42.791870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:23:42.791989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:23:42.792033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:23:42.792080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:23:42.792142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:23:42.793547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:23:42.793629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:23:42.793660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:23:42.793704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:23:42.793753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:23:42.793833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:23:42.796266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:23:42.797369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:23:42.797611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:23:42.797652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:23:42.798257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:23:42.798369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:23:42.798432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:737:2725] TestWaitNotification: OK eventTxId 102 2026-02-19T08:23:42.798993Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:23:42.799231Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 263us result status StatusSuccess 2026-02-19T08:23:42.799620Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |93.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> KqpComputeScheduler::LeftFairShareIsDistributed+DefaultFairShareMode [GOOD] >> KqpComputeScheduler::LeftFairShareIsDistributed-DefaultFairShareMode [GOOD] >> KqpComputeScheduler::MultipleDatabasesPoolsQueries+DefaultFairShareMode [GOOD] >> KqpComputeScheduler::MultipleDatabasesPoolsQueries-DefaultFairShareMode [GOOD] >> TKesusTest::TestSessionDetach >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> KqpComputeScheduler::MultipleDatabasesPoolsQueries-DefaultFairShareMode [GOOD] >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> 
THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> IncrementalBackup::E2EBackupCollection >> TKesusTest::TestAttachNewSessions >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> DirectReadWithServer::KillPQTablet [GOOD] >> DirectReadWithServer::KillPQRBTablet [GOOD] >> LocalPartition::Restarts |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |93.3%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> IncrementalBackup::BackupRestore >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachOldGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:19:11.917918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:12.060280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:12.080808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:12.080945Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:12.081552Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:12.447916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:12.448075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:12.551257Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489148791376 != 1771489148791380 2026-02-19T08:19:12.561059Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:12.608257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:12.696935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:12.926764Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:13.178003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:13.319645Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:19:13.319756Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:19:13.319881Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:871:2753] 2026-02-19T08:19:13.462559Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:871:2753] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:19:13.462671Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:871:2753] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:19:13.463575Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:871:2753] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:19:13.463679Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:871:2753] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:13.464050Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:871:2753] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:13.464201Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:871:2753] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:19:13.464349Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:871:2753] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:19:13.466606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:13.467184Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:871:2753] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:19:13.468015Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:871:2753] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:19:13.468082Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:871:2753] txid# 281474976715657 SEND to# [1:853:2735] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:19:13.503312Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:887:2768], Recipient [1:895:2774]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:19:13.504360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:887:2768], Recipient [1:895:2774]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:19:13.504686Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:895:2774] 2026-02-19T08:19:13.505002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:13.561536Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:887:2768], Recipient [1:895:2774]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:19:13.562692Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:13.562851Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:13.564877Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:19:13.564979Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:19:13.565040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:19:13.565477Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:13.565667Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:13.565822Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:911:2774] in generation 1 2026-02-19T08:19:13.576736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:13.628186Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:19:13.628409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:19:13.628563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:913:2784] 2026-02-19T08:19:13.628611Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:13.628665Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:13.628706Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.628949Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:895:2774], Recipient [1:895:2774]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:19:13.629014Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:19:13.629420Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:13.629519Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:13.629607Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:13.629662Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:13.629739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:19:13.629783Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:19:13.629814Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:19:13.629884Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:13.629944Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:13.633585Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:898:2775], Recipient [1:895:2774]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:19:13.633658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:19:13.633725Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:894:2773], serverId# [1:898:2775], sessionId# [0:0:0] 2026-02-19T08:19:13.633850Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:898:2775] 2026-02-19T08:19:13.633890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:19:13.634022Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:13.634330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:19:13.634389Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:13.634530Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transacti ... th status OK role: Leader [27:1179:2976] 2026-02-19T08:23:43.017477Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [27:1179:2976] 2026-02-19T08:23:43.017574Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72075186224037888] push event to server [27:1179:2976] 2026-02-19T08:23:43.017730Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72075186224037888] shutdown pipe due to pending shutdown request [27:1179:2976] 2026-02-19T08:23:43.017860Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72075186224037888] notify reset [27:1179:2976] 2026-02-19T08:23:43.018188Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553210, Sender [27:1178:2975], Recipient [27:924:2787]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-02-19T08:23:43.018341Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2026-02-19T08:23:43.018483Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:23:43.018666Z node 27 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2026-02-19T08:23:43.018868Z node 27 :TX_DATASHARD INFO: datashard__compaction.cpp:148: Started compaction# 1 with cookie 0 of 72075186224037888 tableId# 38 localTid# 1001, requested from [27:1178:2975], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2026-02-19T08:23:43.019052Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:23:43.019203Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:23:43.019624Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, edge 9223372036854775807/0, generation 0 2026-02-19T08:23:43.019742Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:16} starting compaction 2026-02-19T08:23:43.020224Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} starting Scan{1 on 1001, Compact{72075186224037888.1.16, eph 1}} 2026-02-19T08:23:43.020376Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} started compaction 1 2026-02-19T08:23:43.020480Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... 
blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 14711341373050572072 2026-02-19T08:23:43.023863Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 16, product {tx status + 1 parts epoch 2} done 2026-02-19T08:23:43.024206Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2026-02-19T08:23:43.024334Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2026-02-19T08:23:43.024401Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2026-02-19T08:23:43.024993Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.523100Z 2026-02-19T08:23:43.025157Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2026-02-19T08:23:43.025267Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:23:43.025390Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2026-02-19T08:23:43.025501Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:266: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [27:1178:2975], pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:23:43.026239Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2026-02-19T08:23:43.026374Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 13888629950195108201 ... 
blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 7293627118334202629 ========= Starting an immediate read ========= 2026-02-19T08:23:43.250719Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037888] send [27:1113:2921] 2026-02-19T08:23:43.250872Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72075186224037888] push event to server [27:1113:2921] 2026-02-19T08:23:43.251340Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [27:1203:2983], Recipient [27:924:2787]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-02-19T08:23:43.251597Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-02-19T08:23:43.251788Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:23:43.251972Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:23:43.252091Z node 27 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-02-19T08:23:43.252209Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v2502/18446744073709551615 2026-02-19T08:23:43.252376Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-02-19T08:23:43.252608Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:23:43.252715Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:23:43.252804Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:23:43.252892Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:23:43.252973Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-02-19T08:23:43.253066Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:23:43.253124Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:23:43.253153Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:23:43.253182Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:23:43.253399Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 
72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-02-19T08:23:43.253756Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is DelayComplete 2026-02-19T08:23:43.253821Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:23:43.253920Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:23:43.254000Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:23:43.254071Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:23:43.254101Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:23:43.254149Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-02-19T08:23:43.254252Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:23:43.254413Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:23:43.254594Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:23:43.389925Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-02-19T08:23:43.390130Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:23:43.390449Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{16, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-02-19T08:23:43.390604Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:23:43.391574Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} commited cookie 1 for step 17 2026-02-19T08:23:43.392045Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [27:542:2484] 2026-02-19T08:23:43.392167Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72057594046382081] push event to server [27:542:2484] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> IncrementalBackup::MultiBackup >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachOutOfSequence >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TKesusTest::TestAcquireTimeout >> 
Secret::Validation [GOOD] >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> IncrementalBackup::SimpleBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2026-02-19T08:23:45.442334Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:45.442482Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:45.462389Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:45.462500Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:45.477630Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:45.478249Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=13400967814908495818, session=0, seqNo=0) 2026-02-19T08:23:45.478474Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:45.502639Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=13400967814908495818, session=1) 2026-02-19T08:23:45.504486Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:135:2159], cookie=7560714511436783160, session=2) 2026-02-19T08:23:45.504571Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:135:2159], cookie=7560714511436783160) 2026-02-19T08:23:45.505035Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:135:2159], cookie=5929359737507978766 2026-02-19T08:23:45.505603Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=2436266666599295547, session=1, seqNo=0) 2026-02-19T08:23:45.522705Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=2436266666599295547, session=1) 2026-02-19T08:23:45.523167Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:45.523363Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:23:45.523481Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:45.523694Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:135:2159], cookie=8760719311272358867, session=1) 2026-02-19T08:23:45.534127Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-02-19T08:23:45.534222Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-02-19T08:23:45.534269Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-02-19T08:23:45.549659Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=111) 2026-02-19T08:23:45.549791Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:135:2159], cookie=8760719311272358867) 2026-02-19T08:23:45.549853Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-02-19T08:23:46.156968Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:46.157077Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:46.174604Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:46.174694Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:46.217111Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:46.217505Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:134:2159], cookie=11806628002571142140, path="") 2026-02-19T08:23:46.234805Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:134:2159], cookie=11806628002571142140, status=SUCCESS) 2026-02-19T08:23:46.235608Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:144:2166], cookie=111, session=0, seqNo=0) 2026-02-19T08:23:46.235762Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:46.235973Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[2:144:2166], cookie=14285656058142214144, session=1) 2026-02-19T08:23:46.246371Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-02-19T08:23:46.246458Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-02-19T08:23:46.263123Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:144:2166], cookie=111, session=1) 2026-02-19T08:23:46.263224Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[2:144:2166], cookie=14285656058142214144) 2026-02-19T08:23:46.263273Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-02-19T08:23:46.809556Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:46.809688Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:46.865383Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:46.865536Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:46.910648Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:46.911313Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=17527685295216248558, session=0, seqNo=0) 2026-02-19T08:23:46.911486Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:46.926841Z node 3 :KESUS_TABLET 
DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=17527685295216248558, session=1) 2026-02-19T08:23:46.927653Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:137:2161], cookie=15914067913176383700, session=1) 2026-02-19T08:23:46.927780Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-02-19T08:23:46.943194Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:137:2161], cookie=15914067913176383700) 2026-02-19T08:23:46.944172Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=13626826999291829861) 2026-02-19T08:23:46.944268Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=13626826999291829861) 2026-02-19T08:23:46.944938Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:157:2179], cookie=496425978165148817, session=0, seqNo=0) 2026-02-19T08:23:46.945074Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:46.958076Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:157:2179], cookie=496425978165148817, session=2) 2026-02-19T08:23:46.959415Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:137:2161], cookie=8498594891310162653, session=2) 2026-02-19T08:23:46.959566Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 2 2026-02-19T08:23:46.972181Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:137:2161], cookie=8498594891310162653) 2026-02-19T08:23:47.639050Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:47.639172Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:47.661989Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:47.662137Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:47.697878Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:47.698493Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=15601673125450239916, session=0, seqNo=0) 2026-02-19T08:23:47.698658Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:47.710946Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:136:2161], cookie=15601673125450239916, session=1) 2026-02-19T08:23:47.711324Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:136:2161], cookie=6381820076326700463, session=0, seqNo=0) 2026-02-19T08:23:47.711481Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:47.726533Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete 
(sender=[4:136:2161], cookie=6381820076326700463, session=2) 2026-02-19T08:23:47.727157Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:148:2170], cookie=12339460592883544523, name="Sem1", limit=1) 2026-02-19T08:23:47.727324Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-02-19T08:23:47.741430Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:148:2170], cookie=12339460592883544523) 2026-02-19T08:23:47.742112Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:153:2175], cookie=1247809174376457505, name="Sem1", force=0) 2026-02-19T08:23:47.742232Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-02-19T08:23:47.756297Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:153:2175], cookie=1247809174376457505) 2026-02-19T08:23:47.756836Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:158:2180], cookie=7228999269718676621, name="Sem1", limit=1) 2026-02-19T08:23:47.756975Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-02-19T08:23:47.769233Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:158:2180], cookie=7228999269718676621) 2026-02-19T08:23:47.769740Z node 4 :KESUS_ ... SemaphoreCreate::Complete (sender=[4:249:2270], cookie=1290667437826232582) 2026-02-19T08:23:48.042413Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:136:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-02-19T08:23:48.042593Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2026-02-19T08:23:48.059917Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:136:2161], cookie=111) 2026-02-19T08:23:48.060408Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:136:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-02-19T08:23:48.073399Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:136:2161], cookie=222) 2026-02-19T08:23:48.073983Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:136:2161], cookie=333, name="Sem1") 2026-02-19T08:23:48.074122Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2026-02-19T08:23:48.096968Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:136:2161], cookie=333) 2026-02-19T08:23:48.097607Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:136:2161], cookie=444, session=2, semaphore="Sem1" count=1) 2026-02-19T08:23:48.117594Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:136:2161], cookie=444) 2026-02-19T08:23:48.118235Z 
node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:136:2161], cookie=555, name="Sem1") 2026-02-19T08:23:48.118369Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2026-02-19T08:23:48.118454Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2026-02-19T08:23:48.132977Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:136:2161], cookie=555) 2026-02-19T08:23:48.857663Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:48.857812Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:48.876827Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:48.877327Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:48.914551Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:48.915138Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=16780431730996164238, session=0, seqNo=0) 2026-02-19T08:23:48.915332Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:48.928755Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=16780431730996164238, session=1) 2026-02-19T08:23:48.929148Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:136:2161], cookie=112, name="Sem1", limit=5) 2026-02-19T08:23:48.929308Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-02-19T08:23:48.941661Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:136:2161], cookie=112) 2026-02-19T08:23:48.942072Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:136:2161], cookie=113, name="Sem1") 2026-02-19T08:23:48.954778Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:136:2161], cookie=113) 2026-02-19T08:23:48.955139Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:136:2161], cookie=114, name="Sem1", force=0) 2026-02-19T08:23:48.955242Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-02-19T08:23:48.968142Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:136:2161], cookie=114) 2026-02-19T08:23:48.968442Z node 5 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[5:136:2161], cookie=14099345769185749988 2026-02-19T08:23:48.968725Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:136:2161], cookie=115, name="Sem1", limit=5) 2026-02-19T08:23:48.982460Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete 
(sender=[5:136:2161], cookie=115) 2026-02-19T08:23:48.982817Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:136:2161], cookie=116, name="Sem1") 2026-02-19T08:23:48.996897Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:136:2161], cookie=116) 2026-02-19T08:23:48.997393Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:136:2161], cookie=117, name="Sem1", force=0) 2026-02-19T08:23:49.010674Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:136:2161], cookie=117) 2026-02-19T08:23:49.011027Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=118, session=1, semaphore="Sem1" count=1) 2026-02-19T08:23:49.023720Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=118) 2026-02-19T08:23:49.024095Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=119, name="Sem1") 2026-02-19T08:23:49.039103Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=119) 2026-02-19T08:23:49.039503Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:136:2161], cookie=120, name="Sem1") 2026-02-19T08:23:49.039608Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:136:2161], cookie=120) 2026-02-19T08:23:49.039907Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:136:2161], cookie=14886282471971376163, session=1) 2026-02-19T08:23:49.040039Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-02-19T08:23:49.053587Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:136:2161], cookie=14886282471971376163) 2026-02-19T08:23:49.053960Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:136:2161], cookie=121, name="Sem1", limit=5) 2026-02-19T08:23:49.071004Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:136:2161], cookie=121) 2026-02-19T08:23:49.071400Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:136:2161], cookie=122, name="Sem1") 2026-02-19T08:23:49.084248Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:136:2161], cookie=122) 2026-02-19T08:23:49.084600Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:136:2161], cookie=123, name="Sem1", force=0) 2026-02-19T08:23:49.098576Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:136:2161], cookie=123) 2026-02-19T08:23:49.098939Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=124, session=1, semaphore="Sem1" 
count=1) 2026-02-19T08:23:49.111297Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=124) 2026-02-19T08:23:49.111634Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=125, name="Sem1") 2026-02-19T08:23:49.136831Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=125) 2026-02-19T08:23:49.137174Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:136:2161], cookie=126, name="Sem1") 2026-02-19T08:23:49.137266Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:136:2161], cookie=126) 2026-02-19T08:23:49.137903Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:136:2161], cookie=127, name="Sem1", limit=5) 2026-02-19T08:23:49.137980Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:136:2161], cookie=127) 2026-02-19T08:23:49.138185Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:136:2161], cookie=128, name="Sem1") 2026-02-19T08:23:49.138246Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:136:2161], cookie=128) 2026-02-19T08:23:49.138440Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:136:2161], cookie=129, name="Sem1", force=0) 2026-02-19T08:23:49.138508Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:136:2161], cookie=129) 2026-02-19T08:23:49.138700Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=130, session=1, semaphore="Sem1" count=1) 2026-02-19T08:23:49.138766Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=130) 2026-02-19T08:23:49.138963Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=131, name="Sem1") 2026-02-19T08:23:49.139018Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=131) 2026-02-19T08:23:49.139210Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:136:2161], cookie=132, name="Sem1") 2026-02-19T08:23:49.139265Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:136:2161], cookie=132) >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement [GOOD] >> 
KqpResultSetFormats::ArrowFormat_BulkUpsert+IsOlap >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAcquireRepeat >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:20:44.594673Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:44.719732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:20:44.739712Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:44.739794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:20:44.740304Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:20:45.081383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:45.081535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:45.158074Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489241332701 != 1771489241332705 2026-02-19T08:20:45.168978Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:45.221036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:45.339393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:20:45.952136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:45.952195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:45.952245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:45.954804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:45.955658Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:45.970382Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-02-19T08:20:57.955866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:964:2829], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:57.956053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:57.956444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:974:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:20:57.956568Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2026-02-19T08:21:08.313133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.313744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.314965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1000:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.315184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.320406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:08.487754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1106:2927], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.487944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.488323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1110:2931], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.488389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.488486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1112:2933], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:08.493129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:08.616970Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1115:2936], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:21:08.923819Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1206:3000] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:09.484569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:09.961696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:10.572615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:11.234365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:11.776852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:12.868502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:13.157301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:21:13.516370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:21:13.516443Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded REQUEST=ALTER OBJECT secret1 (TYPE ...
: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-02-19T08:21:52.157934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:53.441329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:55.859716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715729:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:56.668466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715732:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:1: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-02-19T08:22:24.268017Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=18; 2026-02-19T08:22:24.268204Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 18 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-02-19T08:22:24.268381Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 18 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-02-19T08:22:24.268606Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:996: SelfId: [1:3793:4938], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3708:4938]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3793:4938].{
: Error: Conflict with existing key., code: 2012 } 2026-02-19T08:22:24.269059Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:3787:4938], SessionActorId: [1:3708:4938], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3708:4938]. 2026-02-19T08:22:24.269418Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=1&id=NDhiMmQ3MTctNGJhZmU3YmYtYjM1ODczMzktNGM2NDMwOWQ=, ActorId: [1:3708:4938], ActorState: ExecuteState, LegacyTraceId: 01khtfwep0cegkew7frgm6antg, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3788:4938] from: [1:3787:4938] trace_id# 2026-02-19T08:22:24.269579Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:3788:4938] TxId: 281474976715763. Ctx: { TraceId: 01khtfwep0cegkew7frgm6antg, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDhiMmQ3MTctNGJhZmU3YmYtYjM1ODczMzktNGM2NDMwOWQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-02-19T08:22:24.269916Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=NDhiMmQ3MTctNGJhZmU3YmYtYjM1ODczMzktNGM2NDMwOWQ=, ActorId: [1:3708:4938], ActorState: ExecuteState, LegacyTraceId: 01khtfwep0cegkew7frgm6antg, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-02-19T08:22:24.289300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01khtfwefm7hs1at8j79qx85ng" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NDhiMmQ3MTctNGJhZmU3YmYtYjM1ODczMzktNGM2NDMwOWQ=" tx_control { tx_id: "01khtfwefm7hs1at8j79qx85ng" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-02-19T08:22:48.045364Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4331:5425], for# root@builtin, access# DescribeSchema 2026-02-19T08:22:48.045504Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4331:5425], for# root@builtin, access# DescribeSchema 2026-02-19T08:22:48.047973Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:4328:5422], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:48.052517Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MTNkYTcyOTctZWJkZWMyNTgtZmRhZDc0Zi02MDNkNjQ0MQ==, ActorId: [1:4324:5419], ActorState: ExecuteState, LegacyTraceId: 01khtfx5yj2h6vcytrhqvzfjn9, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-02-19T08:23:00.889500Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:1: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachThenReRegister [GOOD] Test command err: 2026-02-19T08:23:48.126855Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:48.127008Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:48.146953Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:48.147083Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:48.162165Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:48.162782Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=6816434410079775672, session=0, seqNo=0) 2026-02-19T08:23:48.162968Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:48.185793Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=6816434410079775672, session=1) 2026-02-19T08:23:48.186148Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=16616111683084843633, session=0, seqNo=0) 2026-02-19T08:23:48.186296Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:48.199636Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=16616111683084843633, session=2) 2026-02-19T08:23:48.695898Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:48.695984Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:48.731145Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:48.731324Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:48.780658Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:48.781737Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=14373147463900728331, session=0, seqNo=0) 2026-02-19T08:23:48.781925Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:48.798672Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete 
(sender=[2:134:2159], cookie=14373147463900728331, session=1) 2026-02-19T08:23:49.437663Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:49.437806Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:49.462825Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:49.463049Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:49.504395Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:49.505283Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=11900813368182798844, session=0, seqNo=222) 2026-02-19T08:23:49.505440Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:49.519573Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=11900813368182798844, session=1) 2026-02-19T08:23:49.519974Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=4178520135835555788, session=1, seqNo=111) 2026-02-19T08:23:49.533383Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=4178520135835555788, session=1) 2026-02-19T08:23:50.054556Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:50.054681Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:50.087705Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:50.087961Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:50.123947Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:50.124572Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=111, session=0, seqNo=42) 2026-02-19T08:23:50.124751Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:50.124948Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=222, session=1, seqNo=41) 2026-02-19T08:23:50.137189Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=111, session=1) 2026-02-19T08:23:50.137276Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=222, session=1) 2026-02-19T08:23:50.580317Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:50.580438Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:50.601488Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:50.602056Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:50.646603Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 
2026-02-19T08:23:50.647244Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=14912490586188862111, session=0, seqNo=0) 2026-02-19T08:23:50.647421Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:50.666404Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=14912490586188862111, session=1) 2026-02-19T08:23:50.668186Z node 5 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[5:154:2176], cookie=4218219883024713539) 2026-02-19T08:23:50.668290Z node 5 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[5:154:2176], cookie=4218219883024713539) >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireSemaphore >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table [GOOD] >> Secret::ValidationQueryService [GOOD] >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestAcquireSemaphore [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeout >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestMergeConfig >> THealthCheckTest::BridgeTwoGroups [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TKesusTest::TestAcquireLocks >> TKesusTest::TestAttachMissingSession >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachFastPath ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:20:48.176916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:20:48.316981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:20:48.380218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:20:48.380321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:20:48.381062Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:20:48.690332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:20:48.690507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:20:48.769035Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489244918891 != 1771489244918895 2026-02-19T08:20:48.782966Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:20:48.835763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:20:48.951602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:20:49.515857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:20:49.515931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:20:49.515971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:20:49.518928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:20:49.519903Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:20:49.534979Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-02-19T08:21:01.378173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:957:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:01.378321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:01.378409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:01.379537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:973:2833], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:01.379776Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:01.384346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:01.405889Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:971:2831], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-02-19T08:21:01.459396Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1024:2865] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:01.502761Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:1034:2874], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2026-02-19T08:21:01.504583Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YzJiY2Y2NzItMmZkYjI5NGMtOGViNTMyOS0yYzNmODA5YQ==, ActorId: [1:955:2821], ActorState: ExecuteState, LegacyTraceId: 01khtfsxsm2axkvrb39gp49z5q, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 1 column: 20 } message: "mismatched input \'-\' expecting \'(\'" end_position { row: 1 column: 20 } severity: 1 } tx_id# trace_id# REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2026-02-19T08:21:11.936754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:12.718812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:13.137133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:13.782408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:14.812195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:15.414847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:21:16.056401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:17.080106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:21:17.986184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:21:17.986267Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:19.348533Z node 1 :KQP_SESSION WARN: 
{KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=NWQzNGI4YmUtOThkZmZmYTctZmVmNjM1NzctYzU1NmU1MzI=, ActorId: [1:1051:2883], ActorState: ExecuteState, LegacyTraceId: 01khtft7vtac8cmw3k2z6pjh2b, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 not found for alter" severity: 1 } } trace_id# REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-02-19T08:21:55.885523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB ... t called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:01.085597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715731:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2026-02-19T08:22:14.688410Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YzA4MmY2YjktOTI4ZmNmNjMtZTVmYjRiMDUtYjE4ODNkYzM=, ActorId: [1:3247:4580], ActorState: ExecuteState, LegacyTraceId: 01khtfw4jgamq0zpwkd5ebe7gk, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET_ACCESS\"" severity: 1 issues { message: "preparation problem: used in access secret secret2 not found" severity: 1 } } trace_id# REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Executing operation with object "SECRET_ACCESS"
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-02-19T08:22:27.180045Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=15; 2026-02-19T08:22:27.180242Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 15 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-02-19T08:22:27.180369Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 15 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-02-19T08:22:27.180613Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:996: SelfId: [1:3595:4785], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3510:4785]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3595:4785].{
: Error: Conflict with existing key., code: 2012 } 2026-02-19T08:22:27.208389Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:3589:4785], SessionActorId: [1:3510:4785], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3510:4785]. 2026-02-19T08:22:27.208960Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=1&id=NTMxMjI5NzgtYzE2ZDVlZDctMTU0MWEyZDktNDRkYjFkOWI=, ActorId: [1:3510:4785], ActorState: ExecuteState, LegacyTraceId: 01khtfwhgw4pqe9872h0fh5z34, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3590:4785] from: [1:3589:4785] trace_id# 2026-02-19T08:22:27.209174Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:3590:4785] TxId: 281474976715757. Ctx: { TraceId: 01khtfwhgw4pqe9872h0fh5z34, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTMxMjI5NzgtYzE2ZDVlZDctMTU0MWEyZDktNDRkYjFkOWI=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-02-19T08:22:27.209527Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=NTMxMjI5NzgtYzE2ZDVlZDctMTU0MWEyZDktNDRkYjFkOWI=, ActorId: [1:3510:4785], ActorState: ExecuteState, LegacyTraceId: 01khtfwhgw4pqe9872h0fh5z34, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-02-19T08:22:27.217647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01khtfwh9e249643e6bg5hxerq" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NTMxMjI5NzgtYzE2ZDVlZDctMTU0MWEyZDktNDRkYjFkOWI=" tx_control { tx_id: "01khtfwh9e249643e6bg5hxerq" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-02-19T08:22:39.219119Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YjU5YjkzNTEtZjE2YTM1N2ItNmFkYzRhOWQtYTkxOTM2YmI=, ActorId: [1:3770:4986], ActorState: ExecuteState, LegacyTraceId: 01khtfwwq828t3gv1sm3918wqs, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 using in access for test@test1" severity: 1 } } trace_id# REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-02-19T08:22:50.953485Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4094:5243], for# root@builtin, access# DescribeSchema 2026-02-19T08:22:50.953643Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4094:5243], for# root@builtin, access# DescribeSchema 2026-02-19T08:22:50.964584Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:4091:5240], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:50.974401Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YmI1MjNmODEtMmMzZWMwMjUtNjYwNWQxZTYtNjQzYWM3MDk=, ActorId: [1:4085:5235], ActorState: ExecuteState, LegacyTraceId: 01khtfx8s6eb7w2v032j3n749t, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-02-19T08:23:04.071743Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1035: Ticket **** (51449FAE): Could not find correct token validator 2026-02-19T08:23:05.053970Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=ZjIyMmY4NjYtYTVjZmQyMjEtMzlhYjgxMWMtN2NjNjdhNmI=, ActorId: [1:4355:5448], ActorState: ExecuteState, LegacyTraceId: 01khtfxnkzbpbvjke4x3nx02fv, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot CREATE objects: Secret already exists: secret1" severity: 1 } } trace_id# REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-02-19T08:23:18.692485Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YzkyNjFhNmYtZTRjNWVkYS1kZGE5MGJjLTYwZDJiYTdm, ActorId: [1:4740:5753], ActorState: ExecuteState, LegacyTraceId: 01khtfy2kg69azzv5arps0hgsa, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot UPSERT objects: Secret already exists: secret1" severity: 1 } } trace_id# REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> TKesusTest::TestReleaseLockFailure >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> TLocksTest::Range_BrokenLock3 [GOOD] >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> BasicUsage::KeyedWriteSession_UserEventHandlers [GOOD] >> BasicUsage::KeyedWriteSession_ProducerIdPrefixRequired >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRenameWorksNewApi >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAllocatesResources >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant >> TKesusTest::TestSemaphoreData [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table [GOOD] >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> TKesusTest::TestQuoterResourceDescribe >> TKesusTest::TestAllocatesResources [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] >> TxUsage::WriteToTopic_Demo_42_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2026-02-19T08:22:59.805891Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488951349209318:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:59.806305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:00.458610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:00.458695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:00.503351Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:00.559299Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:00.579994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-02-19T08:23:00.690027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:00.817957Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:00.899334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:01.005413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:01.257394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:01.330853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:04.562260Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608488972992203125:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:04.562853Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:04.596349Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:04.701835Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:04.703589Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488972992203077:2081] 1771489384553599 != 1771489384553602 2026-02-19T08:23:04.711089Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:04.711213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:04.729129Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:04.899193Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:04.971209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:04.981395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:05.051410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:05.119042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:05.164739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:08.783325Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608488988642086544:2230];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:08.783396Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:08.894471Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:09.098409Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:09.117481Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:09.117569Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:09.118409Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608488988642086352:2081] 1771489388768091 != 1771489388768094 2026-02-19T08:23:09.193835Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:09.195046Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-02-19T08:23:09.637038Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:09.646220Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:09.665906Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:09.680939Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:09.753604Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:09.801652Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:09.859499Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:09.896789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:13.824204Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608489010448689672:2141];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:13.824724Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:13.838420Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:13.952885Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:13.952980Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:13.956083Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-02-19T08:23:13.964110Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7 ... tDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:30.997878Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:31.120001Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:31.284015Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:37.130326Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608489115296828733:2263];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:37.130534Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:37.130708Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:37.297253Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:37.362704Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:37.362812Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:37.369865Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:37.373936Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608489115296828486:2081] 1771489417049020 != 1771489417049023 2026-02-19T08:23:37.391490Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:37.525856Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:37.816329Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:37.838597Z node 8 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:37.931334Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:37.995150Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:38.118285Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:43.990167Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608489139457580327:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:43.990235Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:44.186084Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:44.203153Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:44.203154Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608489139457580286:2081] 1771489423988219 != 1771489423988222 2026-02-19T08:23:44.244642Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:44.244745Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:44.263166Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:44.425656Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:44.546590Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:44.561278Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:44.566983Z node 9 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:44.649224Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:44.793214Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:45.001966Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:49.855528Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608489165737130430:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:49.855864Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:49.932511Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:50.055498Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:50.057856Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608489165737130401:2081] 1771489429853785 != 1771489429853788 2026-02-19T08:23:50.084371Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:50.084497Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:50.107870Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:50.192554Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:50.558910Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:50.567981Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:23:50.585974Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:50.595686Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:50.686729Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:50.795254Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:50.863277Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreData [GOOD] Test command err: 2026-02-19T08:23:52.728243Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:52.728386Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:52.761951Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:52.762446Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:52.814138Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:52.815487Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 2 2026-02-19T08:23:52.815983Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([1:193:2162]) 2026-02-19T08:23:53.608626Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:53.608741Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:53.632963Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:53.633580Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute ... waiting for register request 2026-02-19T08:23:53.670428Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR cookie 7560239748334505824 ... waiting for register request (done) ... 
unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2026-02-19T08:23:53.671430Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 4 2026-02-19T08:23:53.672114Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([3:194:2162]) 2026-02-19T08:23:54.808086Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:54.808173Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:54.822674Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:54.823144Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:54.857436Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:54.857909Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=2252300863830863602, session=0, seqNo=0) 2026-02-19T08:23:54.858050Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:54.870064Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=2252300863830863602, session=1) 2026-02-19T08:23:54.870409Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=5831938946915580973, session=0, seqNo=0) 2026-02-19T08:23:54.870549Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:54.882512Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=5831938946915580973, session=2) 2026-02-19T08:23:54.883024Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=111, name="Lock1") 2026-02-19T08:23:54.895217Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=111) 2026-02-19T08:23:54.895570Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:54.895751Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:23:54.895847Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:54.907960Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2026-02-19T08:23:54.908222Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=333, name="Lock1") 2026-02-19T08:23:54.920287Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=333) 2026-02-19T08:23:55.342168Z node 6 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:55.342280Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: 
[72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:55.368843Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:55.369154Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:55.393423Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:55.393909Z node 6 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[6:137:2161], cookie=18364888811729883830, session=0, seqNo=0) 2026-02-19T08:23:55.394035Z node 6 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:55.406540Z node 6 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[6:137:2161], cookie=18364888811729883830, session=1) 2026-02-19T08:23:55.406857Z node 6 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[6:137:2161], cookie=17786026396487537262, session=0, seqNo=0) 2026-02-19T08:23:55.406986Z node 6 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:55.421389Z node 6 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[6:137:2161], cookie=17786026396487537262, session=2) 2026-02-19T08:23:55.421980Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[6:148:2170], cookie=6496110033940752574, name="Sem1", limit=1) 2026-02-19T08:23:55.422131Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-02-19T08:23:55.435597Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[6:148:2170], cookie=6496110033940752574) 2026-02-19T08:23:55.435927Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[6:137:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-02-19T08:23:55.436079Z node 6 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-02-19T08:23:55.436280Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[6:137:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-02-19T08:23:55.458071Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[6:137:2161], cookie=111) 2026-02-19T08:23:55.458169Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[6:137:2161], cookie=222) 2026-02-19T08:23:55.458859Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:156:2178], cookie=5048908779593103379, name="Sem1") 2026-02-19T08:23:55.458981Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:156:2178], cookie=5048908779593103379) 2026-02-19T08:23:55.459476Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:159:2181], cookie=7389711917585976518, name="Sem1") 2026-02-19T08:23:55.459560Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[6:159:2181], cookie=7389711917585976518) 2026-02-19T08:23:55.459854Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[6:137:2161], cookie=333, name="Sem1") 2026-02-19T08:23:55.459969Z node 6 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2026-02-19T08:23:55.478382Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[6:137:2161], cookie=333) 2026-02-19T08:23:55.479235Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:164:2186], cookie=17694798516228503966, name="Sem1") 2026-02-19T08:23:55.479334Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:164:2186], cookie=17694798516228503966) 2026-02-19T08:23:55.479802Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:167:2189], cookie=5726725353870798070, name="Sem1") 2026-02-19T08:23:55.479871Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:167:2189], cookie=5726725353870798070) 2026-02-19T08:23:55.480172Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[6:137:2161], cookie=444, name="Sem1") 2026-02-19T08:23:55.480268Z node 6 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-02-19T08:23:55.499890Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[6:137:2161], cookie=444) 2026-02-19T08:23:55.500598Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:172:2194], cookie=9930655272452537349, name="Sem1") 2026-02-19T08:23:55.500696Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:172:2194], cookie=9930655272452537349) 2026-02-19T08:23:55.501207Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[6:175:2197], cookie=15600076347962287656, name="Sem1") 2026-02-19T08:23:55.501279Z node 6 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[6:175:2197], cookie=15600076347962287656) 2026-02-19T08:23:55.994466Z node 7 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:55.994589Z node 7 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:56.013772Z node 7 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:56.014343Z node 7 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:56.056066Z node 7 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:56.056562Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[7:136:2161], cookie=3788539970153953658, name="Sem1", limit=1) 2026-02-19T08:23:56.056745Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 
2026-02-19T08:23:56.068943Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[7:136:2161], cookie=3788539970153953658) 2026-02-19T08:23:56.069542Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[7:146:2168], cookie=12783902853161588166, name="Sem2", limit=1) 2026-02-19T08:23:56.069702Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem2" 2026-02-19T08:23:56.082170Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[7:146:2168], cookie=12783902853161588166) 2026-02-19T08:23:56.082753Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:151:2173], cookie=2244090949583561599, name="Sem1") 2026-02-19T08:23:56.082843Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:151:2173], cookie=2244090949583561599) 2026-02-19T08:23:56.083296Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:154:2176], cookie=16150035701994764741, name="Sem2") 2026-02-19T08:23:56.083370Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:154:2176], cookie=16150035701994764741) 2026-02-19T08:23:56.094803Z node 7 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:56.094908Z node 7 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:56.095296Z node 7 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:56.095736Z node 7 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:56.142465Z node 7 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:56.142871Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:193:2206], cookie=6227556945355139124, name="Sem1") 2026-02-19T08:23:56.142963Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:193:2206], cookie=6227556945355139124) 2026-02-19T08:23:56.143402Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:199:2211], cookie=8561139212271359047, name="Sem2") 2026-02-19T08:23:56.143472Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:199:2211], cookie=8561139212271359047) 2026-02-19T08:23:56.143928Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[7:202:2214], cookie=10153358687170660406, name="Sem1", limit=1) 2026-02-19T08:23:56.156434Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[7:202:2214], cookie=10153358687170660406) 2026-02-19T08:23:56.156916Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[7:207:2219], cookie=3510156780499831811, name="Sem2", limit=1) 2026-02-19T08:23:56.169355Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] 
TTxSemaphoreCreate::Complete (sender=[7:207:2219], cookie=3510156780499831811) 2026-02-19T08:23:56.170108Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:212:2224], cookie=9275351965124070638, name="Sem1") 2026-02-19T08:23:56.170200Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:212:2224], cookie=9275351965124070638) 2026-02-19T08:23:56.170708Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:215:2227], cookie=2856856152634720533, name="Sem2") 2026-02-19T08:23:56.170778Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:215:2227], cookie=2856856152634720533) 2026-02-19T08:23:56.171296Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[7:218:2230], cookie=2417969230056035653, name="Sem1") 2026-02-19T08:23:56.183859Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[7:218:2230], cookie=2417969230056035653) 2026-02-19T08:23:56.184503Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[7:223:2235], cookie=6373335936111532903, name="Sem2") 2026-02-19T08:23:56.196759Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[7:223:2235], cookie=6373335936111532903) 2026-02-19T08:23:56.197272Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[7:228:2240], cookie=14935719975232613092, name="Sem3") 2026-02-19T08:23:56.209352Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[7:228:2240], cookie=14935719975232613092) 2026-02-19T08:23:56.209976Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:233:2245], cookie=17946078503342644141, name="Sem1") 2026-02-19T08:23:56.210081Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:233:2245], cookie=17946078503342644141) 2026-02-19T08:23:56.210655Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:236:2248], cookie=2246049911271774960, name="Sem2") 2026-02-19T08:23:56.210725Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:236:2248], cookie=2246049911271774960) 2026-02-19T08:23:56.211196Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:239:2251], cookie=14648659707325764772, name="Sem3") 2026-02-19T08:23:56.211257Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:239:2251], cookie=14648659707325764772) 2026-02-19T08:23:56.225888Z node 7 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:56.226025Z node 7 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:56.226537Z node 7 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:56.227286Z node 7 
:KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:56.264999Z node 7 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:56.265438Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:278:2281], cookie=17383381915989973032, name="Sem1") 2026-02-19T08:23:56.265524Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:278:2281], cookie=17383381915989973032) 2026-02-19T08:23:56.266301Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:284:2286], cookie=992039290648906293, name="Sem2") 2026-02-19T08:23:56.266380Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:284:2286], cookie=992039290648906293) 2026-02-19T08:23:56.267054Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[7:287:2289], cookie=7513941167861201232, name="Sem3") 2026-02-19T08:23:56.267134Z node 7 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[7:287:2289], cookie=7513941167861201232) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2026-02-19T08:23:54.026244Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:54.026399Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:54.047661Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:54.047773Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:54.064297Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:54.064949Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=11580947189891358982, session=1, seqNo=0) 2026-02-19T08:23:54.088088Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=11580947189891358982, session=1) 2026-02-19T08:23:54.466693Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:54.466796Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:54.485538Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:54.485660Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:54.524586Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:54.524984Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:134:2159], cookie=5798015935126849765, path="") 2026-02-19T08:23:54.538474Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:134:2159], cookie=5798015935126849765, status=SUCCESS) 2026-02-19T08:23:54.539272Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute 
(sender=[2:144:2166], cookie=12068608075650238497, session=0, seqNo=0) 2026-02-19T08:23:54.539365Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:54.551292Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:144:2166], cookie=12068608075650238497, session=1) 2026-02-19T08:23:54.552037Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:145:2167], cookie=111, session=0, seqNo=0) 2026-02-19T08:23:54.552146Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:54.552308Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:262: [72057594037927937] Fast-path attach session=1 to sender=[2:145:2167], cookie=222, seqNo=0 2026-02-19T08:23:54.564255Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:145:2167], cookie=111, session=2) 2026-02-19T08:23:54.932536Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:54.932642Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:54.944990Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:54.945127Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:54.975833Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:54.976305Z node 3 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[3:137:2161], cookie=636469248348720650, path="") 2026-02-19T08:23:54.990919Z node 3 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[3:137:2161], cookie=636469248348720650, status=SUCCESS) 2026-02-19T08:23:54.991925Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:146:2168], cookie=12855503521232879227, session=0, seqNo=0) 2026-02-19T08:23:54.992063Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:55.007905Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:146:2168], cookie=12855503521232879227, session=1) 2026-02-19T08:23:55.008688Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:146:2168], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:55.008848Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:23:55.008944Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:55.009331Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:147:2169], cookie=111, session=0, seqNo=0) 2026-02-19T08:23:55.009424Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:55.009564Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:147:2169], cookie=222, session=1, seqNo=0) 2026-02-19T08:23:55.021284Z node 
3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:146:2168], cookie=123) 2026-02-19T08:23:55.021355Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:147:2169], cookie=111, session=2) 2026-02-19T08:23:55.021396Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:147:2169], cookie=222, session=1) 2026-02-19T08:23:55.570809Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:55.570930Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:55.586826Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:55.586977Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:55.611100Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:55.611516Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=4346649823095661158, session=0, seqNo=0) 2026-02-19T08:23:55.611667Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:55.624240Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=4346649823095661158, session=1) 2026-02-19T08:23:55.624565Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=13495677761968064270, session=0, seqNo=0) 2026-02-19T08:23:55.624697Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:55.636863Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=13495677761968064270, session=2) 2026-02-19T08:23:55.637208Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:55.637368Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:23:55.637468Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:55.651634Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-02-19T08:23:55.651975Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-02-19T08:23:55.652342Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:55.664734Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-02-19T08:23:55.664831Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], 
cookie=333) 2026-02-19T08:23:55.665351Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:153:2175], cookie=11235950340550183155, name="Lock1") 2026-02-19T08:23:55.665452Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:153:2175], cookie=11235950340550183155) 2026-02-19T08:23:55.665903Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:156:2178], cookie=2259344596296965243, name="Lock1") 2026-02-19T08:23:55.665981Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:156:2178], cookie=2259344596296965243) 2026-02-19T08:23:56.055731Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:56.055850Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:56.075837Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:56.076390Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:56.111392Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:56.119256Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=7132583700033094069, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-02-19T08:23:56.119499Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:23:56.131306Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=7132583700033094069) 2026-02-19T08:23:56.131808Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=16981422077007401230, path="/Root/Res", config={ }) 2026-02-19T08:23:56.132022Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-02-19T08:23:56.143997Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=16981422077007401230) 2026-02-19T08:23:56.145065Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 7770029712086428637. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:23:56.145114Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=7770029712086428637) 2026-02-19T08:23:56.145463Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:151:2173]. Cookie: 4410658552145989412. 
Data: { } 2026-02-19T08:23:56.145501Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:151:2173], cookie=4410658552145989412) 2026-02-19T08:23:56.187473Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-02-19T08:23:56.239758Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-02-19T08:23:56.270981Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-02-19T08:23:56.312792Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-02-19T08:23:56.354672Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterHDRRParametersValidation >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::ServerlessBadTablets >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexBackupBackupCollection >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::SimpleRestore >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> IncrementalBackup::E2EBackupCollection [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> THealthCheckTest::BridgeNoBscResponse >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits >> KqpBatchUpdate::TableWithAsyncIndex-EnableIndexStreamWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: 2026-02-19T08:21:52.161142Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488662064406644:2187];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:52.161247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:52.409791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:52.438121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:52.438198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:52.599247Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488662064406495:2081] 1771489312135003 != 1771489312135006 2026-02-19T08:21:52.638839Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:52.670671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:52.678422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:53.137476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:53.137501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:53.137510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:53.137602Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:53.171639Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:53.918870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:53.924198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:54.011418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.158329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.340983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.425028Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.703851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488679244277565:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:56.703974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:56.704453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488679244277575:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:56.704503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.161940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488662064406644:2187];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:57.162014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:57.455730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.511333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.603370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.790406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.846401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.909062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.959038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.012624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.103042Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488687834213040:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.103138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.103321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488687834213045:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.103358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488687834213046:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.103449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.107227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:58.120937Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488687834213049:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:58.222754Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488687834213102:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:01.461937Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, p ... iscarding snapshot; our snapshot: [step: 1771489422486, txId: 281474976710673] shutting down 2026-02-19T08:23:44.666729Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7608489143436951715:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:44.668800Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:44.707468Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:44.884779Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:44.918429Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:44.918551Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:44.931150Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:44.944861Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:45.078598Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:45.078635Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:45.078648Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:45.078757Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:45.460995Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:45.673951Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:46.071475Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:46.094541Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:46.190989Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:46.477654Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:46.720909Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:49.662056Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7608489143436951715:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:49.662160Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:23:51.133853Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489173501724619:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.133978Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.134380Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489173501724629:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.134437Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.273127Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.363262Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.435307Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.496359Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.576225Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.634286Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.695800Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.769484Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.938213Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489173501725522:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.938389Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.938880Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489173501725527:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.938972Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489173501725528:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.939207Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:51.946477Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:23:51.963929Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7608489173501725531:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:23:52.022492Z node 13 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [13:7608489177796692878:3789] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:23:55.959027Z node 13 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489435422, txId: 281474976710673] shutting down >> KqpBatchUpdate::Large_2 >> KqpBatchUpdate::MultiStatement >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:19:12.072009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:19:12.197219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:19:12.221874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:19:12.221974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:19:12.222616Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:19:12.571796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:12.571943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:12.645512Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489149065097 != 1771489149065101 2026-02-19T08:19:12.653789Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:12.700785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:12.825459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:13.054329Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:19:13.321351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:13.475195Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:19:13.475289Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:19:13.475442Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:871:2753] 2026-02-19T08:19:13.587504Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:871:2753] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:19:13.587615Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:871:2753] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:19:13.588372Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:871:2753] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:19:13.588473Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:871:2753] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:19:13.588823Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:871:2753] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:19:13.588981Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:871:2753] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:19:13.589117Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:871:2753] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:19:13.591605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:13.592149Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:871:2753] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:19:13.593045Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:871:2753] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:19:13.593135Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:871:2753] txid# 281474976715657 SEND to# [1:853:2735] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:19:13.661421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:887:2768], Recipient [1:895:2774]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:19:13.662541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:887:2768], Recipient [1:895:2774]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:19:13.662887Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:895:2774] 2026-02-19T08:19:13.663232Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:19:13.714113Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:887:2768], Recipient [1:895:2774]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:19:13.715056Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:19:13.715253Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:19:13.717232Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:19:13.717315Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:19:13.717377Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:19:13.720280Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:19:13.720472Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:19:13.720572Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:911:2774] in generation 1 2026-02-19T08:19:13.731464Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:19:13.777138Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:19:13.777367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2026-02-19T08:19:13.777489Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:913:2784] 2026-02-19T08:19:13.777531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:19:13.777585Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:19:13.777628Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:19:13.777903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:895:2774], Recipient [1:895:2774]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:19:13.777990Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:19:13.778406Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:19:13.778513Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:19:13.778592Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:19:13.778637Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:19:13.778677Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:19:13.778718Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:19:13.778754Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:19:13.778810Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:19:13.778868Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:19:13.779329Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:898:2775], Recipient [1:895:2774]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:19:13.779387Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:19:13.779429Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:894:2773], serverId# [1:898:2775], sessionId# [0:0:0] 2026-02-19T08:19:13.779529Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:898:2775] 2026-02-19T08:19:13.779570Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:19:13.779688Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:19:13.779920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:19:13.779972Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:19:13.780079Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transacti ... 37: Add [0:7] at 72075186224037889 to execution unit ExecuteRead 2026-02-19T08:23:55.911879Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit ExecuteRead 2026-02-19T08:23:55.911986Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2026-02-19T08:23:55.912239Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2503/18446744073709551615 2026-02-19T08:23:55.912287Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037889 Complete read# {[29:1297:3061], 1} after executionsCount# 1 2026-02-19T08:23:55.912324Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037889 read iterator# {[29:1297:3061], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:23:55.912395Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037889 read iterator# {[29:1297:3061], 1} finished in read 2026-02-19T08:23:55.912447Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2026-02-19T08:23:55.912476Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit ExecuteRead 2026-02-19T08:23:55.912502Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037889 to execution unit CompletedOperations 2026-02-19T08:23:55.912531Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit CompletedOperations 2026-02-19T08:23:55.912579Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2026-02-19T08:23:55.912604Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit CompletedOperations 2026-02-19T08:23:55.912632Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037889 has finished 2026-02-19T08:23:55.912661Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-02-19T08:23:55.912748Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:23:55.912798Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:23:55.912834Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-02-19T08:23:55.913361Z node 29 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037889] send [29:1151:2949] 2026-02-19T08:23:55.913400Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72075186224037889] push event to server [29:1151:2949] 2026-02-19T08:23:55.913514Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [29:1297:3061], Recipient [29:933:2792]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-02-19T08:23:55.913560Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037889 ReadCancel: { ReadId: 1 } 2026-02-19T08:23:55.913891Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037890] ::Bootstrap [29:1299:3064] 2026-02-19T08:23:55.913990Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037890] lookup [29:1299:3064] 2026-02-19T08:23:55.914162Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037890] queue send [29:1299:3064] 2026-02-19T08:23:55.914297Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72075186224037890] forward result local node, try to connect [29:1299:3064] 2026-02-19T08:23:55.914346Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:693: TClient[72075186224037890]::SendEvent [29:1299:3064] 2026-02-19T08:23:55.914533Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [29:1300:3065], Recipient [29:1254:3033]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:55.914576Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:55.914613Z node 29 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037890, clientId# [29:1299:3064], serverId# [29:1300:3065], sessionId# [0:0:0] 2026-02-19T08:23:55.914655Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037890] connected with status OK role: Leader [29:1299:3064] 2026-02-19T08:23:55.914694Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037890] send queued [29:1299:3064] 2026-02-19T08:23:55.914724Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72075186224037890] push event to server [29:1299:3064] 2026-02-19T08:23:55.914910Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [29:1297:3061], Recipient [29:1254:3033]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-02-19T08:23:55.915010Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-02-19T08:23:55.915062Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-02-19T08:23:55.915148Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2026-02-19T08:23:55.915208Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2026-02-19T08:23:55.915275Z node 29 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-02-19T08:23:55.915304Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2026-02-19T08:23:55.915332Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-02-19T08:23:55.915363Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2026-02-19T08:23:55.915417Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2026-02-19T08:23:55.915464Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-02-19T08:23:55.915489Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-02-19T08:23:55.915515Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2026-02-19T08:23:55.915544Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-02-19T08:23:55.915662Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037890 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-02-19T08:23:55.915901Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037890 promoting UnprotectedReadEdge to v2503/18446744073709551615 2026-02-19T08:23:55.915951Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037890 Complete read# {[29:1297:3061], 2} after executionsCount# 1 2026-02-19T08:23:55.915981Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037890 read iterator# {[29:1297:3061], 2} sends rowCount# 1, bytes# 32, quota rows left# 998, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:23:55.916046Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037890 read iterator# {[29:1297:3061], 2} finished in read 2026-02-19T08:23:55.916085Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-02-19T08:23:55.916108Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2026-02-19T08:23:55.916127Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2026-02-19T08:23:55.916149Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2026-02-19T08:23:55.916185Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-02-19T08:23:55.916204Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2026-02-19T08:23:55.916223Z node 29 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2026-02-19T08:23:55.916243Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-02-19T08:23:55.916321Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-02-19T08:23:55.916376Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-02-19T08:23:55.916415Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-02-19T08:23:55.916891Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037890] send [29:1299:3064] 2026-02-19T08:23:55.916930Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:651: TClient[72075186224037890] push event to server [29:1299:3064] 2026-02-19T08:23:55.917040Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [29:1297:3061], Recipient [29:1254:3033]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-02-19T08:23:55.917081Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037890 ReadCancel: { ReadId: 2 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 11 } items { uint32_value: 111 } }, { items { uint32_value: 21 } items { uint32_value: 21 } } >> KqpScan::ScanRetryReadRanges [GOOD] >> IncrementalBackup::MultiBackup [GOOD] >> IncrementalBackup::MultiShardIncrementalRestore >> TLocksTest::Range_GoodLock1 [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TLocksTest::CK_Range_BrokenLockInf [GOOD] >> KqpBatchUpdate::NotIdempotent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:23:39.838427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:39.840479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:39.841494Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:23:39.992251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:39.993984Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:40.000252Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:40.000623Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:40.000897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:40.002649Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:40.002763Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:40.454207Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:40.592892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:40.593025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:40.593815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:40.593907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:40.641270Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:23:40.641794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:40.642213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:40.683625Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:40.767112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:41.469049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:41.545451Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:41.545590Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:41.911506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:42.611057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1732:3112], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:42.611189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1742:3117], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:42.611284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:42.612364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1747:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:42.612591Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:42.627177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:23:43.143227Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1746:3120], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:23:43.307626Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1844:3190] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 2 -- EvScan [1:1923:3243] -> [2:1878:2438] -- EvScanData from [2:1925:2445]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":3,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":true} 2026-02-19T08:23:45.950674Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down ... waiting for SysViewsRoster update finished 2026-02-19T08:23:54.523293Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:54.524217Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:54.524640Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:23:54.536156Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:54.538391Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:54.541077Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:54.542075Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:54.542267Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:54.544457Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:54.544627Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:54.855557Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:54.958358Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:54.958506Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:54.959449Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:54.959528Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:55.016254Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:23:55.016716Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:55.017072Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:55.130549Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:55.162163Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:55.851129Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:55.922658Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:55.922817Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:56.275811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:56.810820Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1735:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:56.810936Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1746:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:56.811094Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:56.812792Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1750:3125], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:56.812981Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:23:56.819212Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:23:57.291184Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1749:3124], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:23:57.402062Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:1843:3190] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 4 -- EvScan [3:1922:3243] -> [4:1877:2438] -- EvScanData from [4:1924:2445]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":3000,"TxId":281474976715661},"Finished":false} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":3000,"TxId":281474976715661},"Finished":true} 2026-02-19T08:23:58.857604Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] Test command err: 2026-02-19T08:23:56.653373Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:56.653509Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:56.673930Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:56.674066Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:56.699998Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:56.704840Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:137:2161], cookie=14618109902003492359, path="/Root", config={ MaxUnitsPerSecond: 100500 
MaxBurstSizeCoefficient: 1.5 }) 2026-02-19T08:23:56.705085Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:23:56.717177Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:137:2161], cookie=14618109902003492359) 2026-02-19T08:23:56.717696Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2168], cookie=16898176646630827734, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-02-19T08:23:56.717923Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2026-02-19T08:23:56.734140Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2168], cookie=16898176646630827734) 2026-02-19T08:23:56.734811Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:151:2173], cookie=6272237287908206116, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-02-19T08:23:56.735029Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2026-02-19T08:23:56.752051Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:151:2173], cookie=6272237287908206116) 2026-02-19T08:23:56.752770Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:156:2178], cookie=4562657272586419063, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-02-19T08:23:56.753035Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2026-02-19T08:23:56.765265Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:156:2178], cookie=4562657272586419063) 2026-02-19T08:23:56.765962Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:161:2183], cookie=10781952530912552895, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2026-02-19T08:23:56.766165Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2026-02-19T08:23:56.778517Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:161:2183], cookie=10781952530912552895) 2026-02-19T08:23:56.779252Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2188], cookie=8602939758366589333, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2026-02-19T08:23:56.779489Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2026-02-19T08:23:56.792222Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2188], cookie=8602939758366589333) 2026-02-19T08:23:56.792949Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2193], cookie=17254270347000774158, path="/Root2", config={ MaxUnitsPerSecond: 100500 
MaxBurstSizeCoefficient: 1.5 }) 2026-02-19T08:23:56.793134Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 7 "Root2" 2026-02-19T08:23:56.810578Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2193], cookie=17254270347000774158) 2026-02-19T08:23:56.811266Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:176:2198], cookie=3753085976098237963, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2026-02-19T08:23:56.811496Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2026-02-19T08:23:56.823885Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:176:2198], cookie=3753085976098237963) 2026-02-19T08:23:56.824600Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:181:2203], cookie=7984763974099354633, ids=[100], paths=[], recursive=0) 2026-02-19T08:23:56.824695Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:181:2203], cookie=7984763974099354633) 2026-02-19T08:23:56.825227Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:184:2206], cookie=10527466239999829806, ids=[], paths=[Nonexistent/Path], recursive=0) 2026-02-19T08:23:56.825316Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:184:2206], cookie=10527466239999829806) 2026-02-19T08:23:56.825849Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:187:2209], cookie=14728953263494460059, ids=[], paths=[/Root, ], recursive=0) 2026-02-19T08:23:56.825963Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:187:2209], cookie=14728953263494460059) 2026-02-19T08:23:56.826427Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:190:2212], cookie=10955968920090755238, ids=[1, 1], paths=[], recursive=0) 2026-02-19T08:23:56.826525Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:190:2212], cookie=10955968920090755238) 2026-02-19T08:23:56.827208Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:193:2215], cookie=1901160203686495434, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2026-02-19T08:23:56.827279Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:193:2215], cookie=1901160203686495434) 2026-02-19T08:23:56.827788Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:196:2218], cookie=17904761535351602133, ids=[], paths=[], recursive=1) 2026-02-19T08:23:56.827856Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:196:2218], 
cookie=17904761535351602133) 2026-02-19T08:23:56.828449Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:199:2221], cookie=12097351361937628081, ids=[], paths=[], recursive=0) 2026-02-19T08:23:56.828555Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:199:2221], cookie=12097351361937628081) 2026-02-19T08:23:56.829018Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:202:2224], cookie=9198194344970401005, ids=[3, 2], paths=[], recursive=1) 2026-02-19T08:23:56.829096Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:202:2224], cookie=9198194344970401005) 2026-02-19T08:23:56.829598Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:205:2227], cookie=3598987836451853808, ids=[3, 2], paths=[], recursive=0) 2026-02-19T08:23:56.829676Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:205:2227], cookie=3598987836451853808) 2026-02-19T08:23:56.830202Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:208:2230], cookie=9552311913658993574, ids=[], paths=[Root2/], recursive=1) 2026-02-19T08:23:56.830278Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:208:2230], cookie=9552311913658993574) 2026-02-19T08:23:56.830760Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:211:2233], cookie=12154806195890253063, ids=[], paths=[Root2/], recursive=0) 2026-02-19T08:23:56.830815Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:211:2233], cookie=12154806195890253063) 2026-02-19T08:23:56.846515Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:56.846621Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:56.847076Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:56.847674Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:56.903679Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:56.904171Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:250:2263], cookie=2241782202070617733, ids=[100], paths=[], recursive=0) 2026-02-19T08:23:56.904274Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:250:2263], cookie=2241782202070617733) 2026-02-19T08:23:56.905050Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:256:2268], cookie=15680875339163324131, ids=[], paths=[Nonexistent/Path], recursive=0) 2026-02-19T08:23:56.905146Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] 
TTxQuoterResourceDescribe::Complete (sender=[1:256:2268], cookie=15680875339163324131) 2026-02-19T08:23:56.905762Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:259:2271], cookie=2878425308947309896, ids=[], paths=[/Root, ], recursive=0) 2026-02-19T08:23:56.905850Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:259:2271], cookie=2878425308947309896) 2026-02-19T08:23:56.906481Z ... oterResourceAdd::Complete (sender=[4:146:2168], cookie=16922873841155989997) 2026-02-19T08:23:58.529067Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:151:2173], cookie=13310757456960785592, path="/Root/Folder", config={ }) 2026-02-19T08:23:58.529248Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2026-02-19T08:23:58.540957Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:151:2173], cookie=13310757456960785592) 2026-02-19T08:23:58.541511Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:156:2178], cookie=11192239664366235881, path="/Root/Folder/Q1", config={ }) 2026-02-19T08:23:58.541732Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2026-02-19T08:23:58.553655Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:156:2178], cookie=11192239664366235881) 2026-02-19T08:23:58.554240Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:161:2183], cookie=13127551263072550615, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.554332Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:161:2183], cookie=13127551263072550615) 2026-02-19T08:23:58.555117Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:167:2189], cookie=584202618255227414, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.555198Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:167:2189], cookie=584202618255227414) 2026-02-19T08:23:58.555903Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:173:2195], cookie=8808400457233417909, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.555964Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:173:2195], cookie=8808400457233417909) 2026-02-19T08:23:58.556370Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:176:2198], cookie=4076770810160038899, id=0, path="/Root/Folder/NonexistingRes") 2026-02-19T08:23:58.556447Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:176:2198], cookie=4076770810160038899) 2026-02-19T08:23:58.556839Z node 4 :KESUS_TABLET DEBUG: 
tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:179:2201], cookie=7720419299192923795, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.556897Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:179:2201], cookie=7720419299192923795) 2026-02-19T08:23:58.557281Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:182:2204], cookie=17787900024210023431, id=100, path="") 2026-02-19T08:23:58.557329Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:182:2204], cookie=17787900024210023431) 2026-02-19T08:23:58.557696Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:185:2207], cookie=15983031825775762350, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.557784Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:185:2207], cookie=15983031825775762350) 2026-02-19T08:23:58.558288Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:188:2210], cookie=6099801824288656085, id=3, path="") 2026-02-19T08:23:58.558340Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:188:2210], cookie=6099801824288656085) 2026-02-19T08:23:58.558713Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:191:2213], cookie=10897593335404456481, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.558762Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:191:2213], cookie=10897593335404456481) 2026-02-19T08:23:58.559237Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:194:2216], cookie=10277676561609843223, id=0, path="/Root/Folder/Q1") 2026-02-19T08:23:58.559505Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2026-02-19T08:23:58.571290Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:194:2216], cookie=10277676561609843223) 2026-02-19T08:23:58.571829Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:199:2221], cookie=16652351991909971116, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.571904Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:199:2221], cookie=16652351991909971116) 2026-02-19T08:23:58.583108Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:58.583209Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:58.583631Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:58.584092Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:58.620399Z node 4 :KESUS_TABLET 
DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:58.620784Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:238:2251], cookie=11512318702821947232, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.620963Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:238:2251], cookie=11512318702821947232) 2026-02-19T08:23:58.621605Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:244:2256], cookie=2399925256498938368, id=3, path="") 2026-02-19T08:23:58.621737Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2026-02-19T08:23:58.644334Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:244:2256], cookie=2399925256498938368) 2026-02-19T08:23:58.645128Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:249:2261], cookie=12552771901433160081, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.645233Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:249:2261], cookie=12552771901433160081) 2026-02-19T08:23:58.657805Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:58.657915Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:58.658452Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:58.659070Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:58.695048Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:58.695377Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:288:2291], cookie=16942159028422615960, ids=[], paths=[], recursive=1) 2026-02-19T08:23:58.695454Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:288:2291], cookie=16942159028422615960) 2026-02-19T08:23:59.069569Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:59.069689Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:59.091284Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:59.091861Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:59.127324Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:59.127862Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=10723440313341996035, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-02-19T08:23:59.128219Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:23:59.140366Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: 
[72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=10723440313341996035) 2026-02-19T08:23:59.141008Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=1034389730099141673, path="/Root/Res", config={ }) 2026-02-19T08:23:59.141255Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-02-19T08:23:59.153600Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=1034389730099141673) 2026-02-19T08:23:59.157752Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:152:2174]. Cookie: 12417418207870490234. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:23:59.157851Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:152:2174], cookie=12417418207870490234) 2026-02-19T08:23:59.158519Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:152:2174]. Cookie: 12023022068958287941. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2026-02-19T08:23:59.158580Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:152:2174], cookie=12023022068958287941) >> ReadLoad::ShouldReadIterate >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpScanArrowInChanels::JoinWithParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2026-02-19T08:23:06.887316Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488980710289560:2258];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:06.887393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:06.941051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:07.253565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:07.258113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:07.258176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:07.339118Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:07.417382Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:07.418963Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488980710289322:2081] 1771489386847977 != 1771489386847980 2026-02-19T08:23:07.474103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:07.840945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:07.865807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:07.882098Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:08.005505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:08.214713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:08.289156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:11.962070Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:11.968221Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:11.968810Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:11.972552Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489001380376245:2081] 1771489391609755 != 1771489391609758 2026-02-19T08:23:12.125622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:12.125695Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:12.131801Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:12.227700Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:12.352726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:12.376211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:12.474377Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:12.525812Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:12.767429Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:16.630188Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:16.630789Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:16.920319Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:16.977482Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:16.977580Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:17.063247Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:17.073946Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608489024021690803:2081] 1771489396472072 != 1771489396472075 2026-02-19T08:23:17.164388Z node 3 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:17.289529Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:17.603715Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:17.672145Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:17.678990Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:17.692493Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:17.748468Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:17.907260Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:18.026183Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:23.040644Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:23.049893Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:23.208462Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:23.208590Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:23.221790Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608489048611269332:2081] 1771489402892599 != 1771489402892602 2026-02-19T08:23:23.244616Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:23.258690Z node 4 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) Vola ... meOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:39.136590Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:39.201924Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:39.452283Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:44.696478Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608489143786547028:2175];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:44.704880Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:44.705044Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:44.864835Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:44.865662Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:44.868793Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608489143786546888:2081] 1771489424637769 != 1771489424637772 2026-02-19T08:23:44.904678Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:44.904801Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:44.937151Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:45.122585Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:45.203037Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:45.227078Z node 8 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:45.322457Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:45.407799Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:45.634206Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:50.250912Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608489168799661229:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:50.251603Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:50.336655Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:50.497260Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:50.497530Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:50.510757Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608489168799661175:2081] 1771489430235603 != 1771489430235606 2026-02-19T08:23:50.513199Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:50.525861Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:50.621402Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:50.937103Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:51.065447Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:51.100854Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:51.141880Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:51.158120Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.259918Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.267569Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:51.343163Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:55.811189Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608489192077643450:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:55.811282Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:55.828324Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:55.933043Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:55.936898Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608489192077643415:2081] 1771489435810160 != 1771489435810163 2026-02-19T08:23:55.951599Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:55.951711Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:55.978889Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:56.129065Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:56.255919Z node 10 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:56.277264Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:56.345619Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:56.413200Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2026-02-19T08:23:09.607042Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488992852210132:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:09.607626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:09.916013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:09.984781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:09.984863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:10.058343Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:10.060629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:10.060992Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488992852210089:2081] 1771489389585529 != 1771489389585532 2026-02-19T08:23:10.208054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:10.327479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:10.346743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:23:10.418416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:10.592040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:10.626301Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:10.653470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:13.474583Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489010001338305:2187];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:13.474632Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:13.517048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.581203Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:13.581276Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:13.587922Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489010001338156:2081] 1771489393467745 != 1771489393467748 2026-02-19T08:23:13.596754Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:13.608600Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:13.798491Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.832183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:13.837088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:13.850065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:13.917336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:13.980652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:18.113207Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608489030564443535:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:18.113288Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:18.113398Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:18.426849Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:18.428620Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:18.428831Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608489030564443306:2081] 1771489398051899 != 1771489398051902 2026-02-19T08:23:18.459234Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:18.459318Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:18.485323Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:18.601808Z node 3 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:18.811152Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:18.830543Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:18.847552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:18.858644Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:18.929179Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:19.043659Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:19.094291Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:23.189441Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:23.189532Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:23.192450Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:23.192687Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:23.193525Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608489050447853304:2081] 1771489402881942 != 1771489402881945 2026-02-19T08:23: ... 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:40.839053Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:40.849890Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:40.853596Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:40.948698Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:41.021409Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:45.210318Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608489146969087380:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:45.211483Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:45.241821Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:45.401863Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:45.404546Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:45.409823Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608489146969087336:2081] 1771489425204912 != 1771489425204915 2026-02-19T08:23:45.425776Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:45.425881Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:45.457325Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:45.836410Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:45.919813Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:45.936112Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:45.961439Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:46.043656Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:46.102148Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:46.218663Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:51.179780Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608489172078936040:2162];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:51.179909Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:51.234398Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:51.347518Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:51.365997Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:51.366189Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:51.399513Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:51.521835Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:51.731056Z node 9 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:51.756053Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:51.768677Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.848844Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:51.918044Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:56.455705Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608489196054332047:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:56.461891Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:56.499217Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:56.598060Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:56.601464Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608489196054332008:2081] 1771489436453216 != 1771489436453219 2026-02-19T08:23:56.615063Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:56.615194Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:56.649443Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:56.779206Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:57.024166Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:57.047628Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:57.114231Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:57.171242Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupRestoreWithIndex >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> THealthCheckTest::ServerlessBadTablets [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail >> ReadLoad::ShouldReadKqp >> KqpBatchUpdate::MultiStatement [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] >> KqpBatchUpdate::TableWithAsyncIndex-EnableIndexStreamWrite [GOOD] >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [GOOD] >> TLocksTest::Range_IncorrectDot2 [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] >> TxUsage::WriteToTopic_Demo_42_Query >> BasicUsage::KeyedWriteSession_ProducerIdPrefixRequired [GOOD] >> BasicUsage::KeyedWriteSession_SessionClosedDueToUserError >> TLocksTest::Range_BrokenLock1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement [GOOD] Test command err: 2026-02-19T08:23:59.479963Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489206495573838:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:59.482789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:59.673264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:59.688374Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:59.688463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:59.762282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:59.769844Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:59.771422Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489206495573805:2081] 1771489439477582 != 1771489439477585 2026-02-19T08:23:59.880017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:59.880045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:59.880051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:59.880136Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:59.942185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:02.435143Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006dc/r3tmp/spilling-tmp-runner/node_1_6104fc53-60001bef-b3dcdbc4-fa007d59, actor: [1:7608489219380477526:2386] 2026-02-19T08:24:02.435321Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006dc/r3tmp/spilling-tmp-runner 2026-02-19T08:24:04.535407Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535495Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535529Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535559Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535593Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535634Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535692Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535747Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535792Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535851Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535895Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.535975Z node 1 
:KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:04.536174Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:04.536233Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:04.536522Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:04.536699Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.536777Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.536828Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.536888Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.536954Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.537007Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.537061Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.537116Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.537177Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.537235Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... [1:7608489227970413205:2532], CA [1:7608489227970413201:2528], CA [1:7608489227970413202:2529], CA [1:7608489227970413203:2530], trace_id# 2026-02-19T08:24:04.555150Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [1:7608489227970413204:2531], CA [1:7608489227970413205:2532], CA [1:7608489227970413201:2528], CA [1:7608489227970413202:2529], CA [1:7608489227970413203:2530], trace_id# 2026-02-19T08:24:04.555370Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489227970413201:2528] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 642 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 409 FinishTimeMs: 1771489444552 EgressBytes: 495 EgressRows: 9 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 399 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489444544 UpdateTimeMs: 1771489444552 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:04.555391Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489227970413201:2528] 2026-02-19T08:24:04.555420Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [1:7608489227970413204:2531], CA [1:7608489227970413205:2532], CA [1:7608489227970413202:2529], CA [1:7608489227970413203:2530], trace_id# 2026-02-19T08:24:04.555435Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489227970413204:2531], CA [1:7608489227970413205:2532], CA [1:7608489227970413202:2529], CA [1:7608489227970413203:2530], trace_id# 2026-02-19T08:24:04.555635Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489227970413202:2529] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1487 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 369 FinishTimeMs: 1771489444552 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 357 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489444544 UpdateTimeMs: 1771489444552 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:04.555655Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489227970413202:2529] 2026-02-19T08:24:04.555685Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489227970413204:2531], CA [1:7608489227970413205:2532], CA [1:7608489227970413203:2530], trace_id# 2026-02-19T08:24:04.555714Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489227970413204:2531], CA [1:7608489227970413205:2532], CA [1:7608489227970413203:2530], trace_id# 2026-02-19T08:24:04.555919Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489227970413203:2530] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1181 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 859 FinishTimeMs: 1771489444552 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 9 BuildCpuTimeUs: 850 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489444546 UpdateTimeMs: 1771489444552 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:04.555938Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489227970413203:2530] 2026-02-19T08:24:04.555956Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489227970413204:2531], CA [1:7608489227970413205:2532], trace_id# 2026-02-19T08:24:04.555971Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489227970413204:2531], CA [1:7608489227970413205:2532], trace_id# 2026-02-19T08:24:04.556175Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489227970413204:2531] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1239 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 866 FinishTimeMs: 1771489444553 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 42 BuildCpuTimeUs: 824 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489444547 UpdateTimeMs: 1771489444553 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:04.556198Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489227970413204:2531] 2026-02-19T08:24:04.556217Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489227970413205:2532], trace_id# 2026-02-19T08:24:04.556230Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. 
Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489227970413205:2532], trace_id# 2026-02-19T08:24:04.556531Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489227970413205:2532] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1744 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1344 FinishTimeMs: 1771489444553 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 47 BuildCpuTimeUs: 1297 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489444548 UpdateTimeMs: 1771489444553 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:04.556550Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489227970413205:2532] 2026-02-19T08:24:04.556584Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:7608489227970413191:2400] trace_id# 2026-02-19T08:24:04.556615Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.010930s ReadRows: 0 ReadBytes: 0 RequestUnits# 7 ForceFlag# true trace_id# 2026-02-19T08:24:04.577898Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:04.577939Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489227970413192:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf9c94n781zd06mh6ft9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzNzc2OTctMjYwOWE2ZjctZGI3MmNhNzAtYWNiYTIzYzg=, PoolId: default, IsStreamingQuery: 0}. 
Terminate, become ZombieState trace_id# >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithAsyncIndex-EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:23:59.236323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489208856548045:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:59.236565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:59.426993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:59.450590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:59.450673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:59.498359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:59.502867Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:59.525868Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489208856548011:2081] 1771489439234601 != 1771489439234604 2026-02-19T08:23:59.588104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:59.588134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:59.588157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:59.588287Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:59.674768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:02.087520Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006dd/r3tmp/spilling-tmp-runner/node_1_16cadbdf-de518f44-929925d1-425e45f3, actor: [1:7608489221741450750:2309] 2026-02-19T08:24:02.087746Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006dd/r3tmp/spilling-tmp-runner 2026-02-19T08:24:02.799273Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:02.799334Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:02.799488Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:02.799507Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:02.799965Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 1 2026-02-19T08:24:02.800188Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:02.800424Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"v1" (Just (String '"123")))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct '('"k" (Int32 '1)) $1 $2 $3)) (let $5 (AsStruct '('"k" (Int32 '"2")) $1 $2 $3)) (let $6 (AsStruct '('"k" (Int32 '"3")) $1 $2 $3)) (let $7 (AsStruct '('"k" (Int32 '"4")) $1 $2 $3)) (let $8 (AsStruct '('"k" (Int32 '"5")) $1 $2 $3)) (return (Iterator (AsList $4 $5 $6 $7 $8))) )))) ) 2026-02-19T08:24:02.800498Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:02.800759Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:02.801219Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489221741451083:2325] 2026-02-19T08:24:02.801262Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. 
Sending channels info to compute actor: [1:7608489221741451083:2325], channels: 0 2026-02-19T08:24:02.801351Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:02.801379Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:02.801399Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489221741451083:2325] 2026-02-19T08:24:02.801428Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489221741451083:2325], channels: 0 2026-02-19T08:24:02.801515Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489221741451083:2325], trace_id# 2026-02-19T08:24:02.801570Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489221741451083:2325], trace_id# 2026-02-19T08:24:02.801613Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:02.833094Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489221741451083:2325] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:02.833192Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489221741451083:2325], trace_id# 2026-02-19T08:24:02.833238Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489221741451083:2325], trace_id# 2026-02-19T08:24:02.834470Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489221741451080:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489221741451083:2325] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2170 Tasks { TaskId: 1 CpuTimeUs: 1237 FinishTimeMs: 1771489442833 EgressBytes: 105 EgressRows: 5 ComputeCpuTimeUs: 50 BuildCpuTimeUs: 1187 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489442801 UpdateTimeMs: 1771489442833 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:02.834534Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710661. Ctx: { TraceId: 01khtfzetn5wxnrkmd1s0dpcmh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Compute ac ... EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489234626353253:2444], channels: 2 2026-02-19T08:24:05.091863Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489234626353252:2443], channels: 1 2026-02-19T08:24:05.091916Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# true trace_id# 2026-02-19T08:24:05.091932Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. 
Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:05.091943Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [1:7608489234626353253:2444] 2026-02-19T08:24:05.091959Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [1:7608489234626353252:2443] 2026-02-19T08:24:05.091993Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489234626353253:2444], channels: 2 2026-02-19T08:24:05.092011Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489234626353252:2443], channels: 1 2026-02-19T08:24:05.092062Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489234626353253:2444], CA [1:7608489234626353252:2443], trace_id# 2026-02-19T08:24:05.092096Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489234626353253:2444], CA [1:7608489234626353252:2443], trace_id# 2026-02-19T08:24:05.092119Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:05.093222Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489234626353252:2443] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:05.093246Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489234626353253:2444], CA [1:7608489234626353252:2443], trace_id# 2026-02-19T08:24:05.093278Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489234626353253:2444], CA [1:7608489234626353252:2443], trace_id# 2026-02-19T08:24:05.093300Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489234626353253:2444] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:05.093311Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489234626353253:2444], CA [1:7608489234626353252:2443], trace_id# 2026-02-19T08:24:05.093338Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489234626353253:2444], CA [1:7608489234626353252:2443], trace_id# 2026-02-19T08:24:05.094245Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489234626353252:2443] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1227 Tasks { TaskId: 1 CpuTimeUs: 651 FinishTimeMs: 1771489445093 OutputBytes: 1 Tables { TablePath: "/Root/global_async_idx/idx/indexImplTable" ReadRows: 5 ReadBytes: 40 AffectedPartitions: 1 } IngressRows: 5 ComputeCpuTimeUs: 86 BuildCpuTimeUs: 565 WaitOutputTimeUs: 24 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489445093 CreateTimeMs: 1771489445092 UpdateTimeMs: 1771489445093 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:05.094282Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489234626353252:2443] 2026-02-19T08:24:05.094301Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489234626353253:2444], trace_id# 2026-02-19T08:24:05.094315Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489234626353253:2444], trace_id# 2026-02-19T08:24:05.094379Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489234626353253:2444] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 709 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 141 FinishTimeMs: 1771489445094 InputBytes: 1 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 74 BuildCpuTimeUs: 67 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489445093 CreateTimeMs: 1771489445093 UpdateTimeMs: 1771489445094 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:05.094390Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489234626353253:2444] 2026-02-19T08:24:05.094469Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-02-19T08:24:05.094490Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:05.094525Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489234626353248:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtfzgv04yg3bf1km4d9vwta, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWU1MGEwM2ItMjM4NmY5NTYtNmVkYTlhNzQtNTU0NTc2NmM=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.001936s ReadRows: 5 ReadBytes: 40 RequestUnits# 5 ForceFlag# true trace_id# >> KqpBatchUpdate::NotIdempotent [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant >> KqpResultSetFormats::ArrowFormat_BulkUpsert+IsOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_BulkUpsert-IsOlap >> TxUsage::WriteToTopic_Demo_11_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2026-02-19T08:23:14.591430Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489014725798946:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:14.591471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:15.009154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:15.036114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:15.036189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:15.132268Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:15.137870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489014725798695:2081] 1771489394507772 != 1771489394507775 2026-02-19T08:23:15.157087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:15.251544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:15.429560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:15.446246Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:15.459218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:15.552725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:15.586921Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:15.801749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:15.888945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:19.364444Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:19.365961Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489035691706793:2270];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:19.366011Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:19.589794Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:19.602103Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:19.610097Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489035691706547:2081] 1771489399273486 != 1771489399273489 2026-02-19T08:23:19.627917Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:19.628013Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:19.686054Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:19.821789Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-02-19T08:23:20.059694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:20.077968Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:23:20.094567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:23:20.099602Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:20.236464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:20.357084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:20.359190Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:24.281140Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608489059091860418:2264];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:24.281196Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:24.305029Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:24.306933Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.005782s 2026-02-19T08:23:24.392304Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:24.535777Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:24.537809Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:24.537880Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:24.541135Z node 3 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608489059091860182:2081] 1771489404231445 != 1771489404231448 2026-02-19T08:23:24.589162Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:24.613829Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:24.881621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:24.894509Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:23:24.918119Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:23:24.928477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:25.021448Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:25.096079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:25.281844Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:29.447494Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp: ... 
ns 2026-02-19T08:23:47.876720Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:47.899943Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:47.979370Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:48.092993Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:48.415380Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:52.195010Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608489175889904410:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:52.202418Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:52.316979Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:52.327261Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:52.327370Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:52.341800Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608489175889904355:2081] 1771489432186753 != 1771489432186756 2026-02-19T08:23:52.372119Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:52.383263Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:52.488266Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:52.675805Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:52.703511Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:52.772888Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:52.888926Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:56.810203Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608489195154971692:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:56.810265Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:56.834367Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:56.950330Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:56.953655Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608489195154971661:2081] 1771489436808895 != 1771489436808898 2026-02-19T08:23:56.956005Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:56.956132Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:57.022825Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:57.024477Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:57.303725Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:57.325848Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:57.395634Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:57.442706Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:57.461497Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:01.321218Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608489218192774063:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:01.321311Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:01.341320Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:24:01.428618Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:01.430314Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608489218192774032:2081] 1771489441320130 != 1771489441320133 2026-02-19T08:24:01.466067Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:01.466185Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:01.473242Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:01.633976Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:24:01.752030Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:01.758251Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 
72057594046644480 2026-02-19T08:24:01.771251Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:24:01.776021Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:01.840552Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:01.979682Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> TxUsage::WriteToTopic_Demo_12_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2026-02-19T08:23:15.198308Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489018689210061:2167];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:15.198373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:15.722725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:15.722849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:15.860041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:15.891225Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:15.911998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:15.971955Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489018689209931:2081] 1771489395159645 != 1771489395159648 2026-02-19T08:23:16.089806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:16.205958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:16.681555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:16.688809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:16.710069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:16.890291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:17.216946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:17.292771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:20.552302Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489041667166665:2216];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:20.552491Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:20.557286Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:20.693258Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:20.693603Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:20.698088Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489041667166486:2081] 1771489400524478 != 1771489400524481 2026-02-19T08:23:20.705194Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:20.705287Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:20.748211Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:20.917827Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:21.011780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:21.021980Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:21.046929Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:21.060687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:21.216065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:21.308253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:21.558091Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:25.474278Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608489062025118164:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:25.474668Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:25.511865Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:25.688468Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:25.689489Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:25.693947Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for 
subscription [3:7608489062025118125:2081] 1771489405470729 != 1771489405470732 2026-02-19T08:23:25.716358Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:25.716434Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:25.756541Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:25.867862Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:26.001597Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:26.007896Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:26.041102Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:26.167443Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:26.230406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:26.507942Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:30.347240Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608489084276879439:2167];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:30.347300Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;err ... 
2026-02-19T08:23:46.628451Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:46.889015Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:46.896783Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:23:46.908220Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:46.914445Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:47.046412Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:47.181510Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:47.322387Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:51.944152Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608489171377443391:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:51.944863Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:52.048930Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:52.069259Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608489171377443349:2081] 1771489431939009 != 1771489431939012 2026-02-19T08:23:52.086788Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:52.095790Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:52.095941Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:52.110289Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:52.312617Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:52.459034Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:52.485655Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:52.561415Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:52.628081Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:56.796698Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608489196317158181:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:56.797560Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:56.841862Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:56.911153Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:56.913548Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608489196317158140:2081] 1771489436793745 != 1771489436793748 2026-02-19T08:23:56.962790Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:56.962903Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:56.966549Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:57.135997Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:57.202874Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:57.222019Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:57.281859Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:57.325854Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:01.581314Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608489217616389440:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:01.582272Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:01.612112Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:24:01.708956Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:01.712225Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608489217616389404:2081] 1771489441576362 != 1771489441576365 2026-02-19T08:24:01.731168Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:01.731317Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:01.760990Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:01.873193Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:24:02.051800Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:02.072881Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:02.127068Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:02.172405Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent [GOOD] Test command err: 2026-02-19T08:24:01.469580Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489214705335257:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:01.470210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:01.670307Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:01.687160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:01.687244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:01.754539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:01.758325Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:01.759801Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489214705335226:2081] 1771489441467994 != 1771489441467997 2026-02-19T08:24:01.851762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:01.851793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:01.851808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:01.851915Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:01.861652Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:04.265579Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006d9/r3tmp/spilling-tmp-runner/node_1_a06737cd-50f44d71-ac365c81-22ba28e5, actor: [1:7608489227590238946:2384] 2026-02-19T08:24:04.265827Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006d9/r3tmp/spilling-tmp-runner 2026-02-19T08:24:06.320166Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320221Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320283Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320344Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320379Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320430Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320476Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320519Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320565Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320607Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320647Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:06.320716Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:06.320913Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:06.320949Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:06.321280Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. 
Resolved key sets: 11 2026-02-19T08:24:06.321452Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.321508Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.321570Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.321659Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.321734Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.321800Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.321870Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.321937Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.321982Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:06.322047Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... A [1:7608489236180174626:2529], CA [1:7608489236180174627:2530], CA [1:7608489236180174628:2531], CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.339675Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. 
Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [1:7608489236180174625:2528], CA [1:7608489236180174626:2529], CA [1:7608489236180174627:2530], CA [1:7608489236180174628:2531], CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.339874Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489236180174625:2528] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 602 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 370 FinishTimeMs: 1771489446336 EgressBytes: 495 EgressRows: 9 ComputeCpuTimeUs: 9 BuildCpuTimeUs: 361 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489446329 UpdateTimeMs: 1771489446336 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:06.339892Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489236180174625:2528] 2026-02-19T08:24:06.339913Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489236180174626:2529], CA [1:7608489236180174627:2530], CA [1:7608489236180174628:2531], CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.339929Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489236180174626:2529], CA [1:7608489236180174627:2530], CA [1:7608489236180174628:2531], CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.340130Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489236180174626:2529] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1140 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 316 FinishTimeMs: 1771489446336 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 9 BuildCpuTimeUs: 307 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489446330 UpdateTimeMs: 1771489446336 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:06.340152Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. 
Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489236180174626:2529] 2026-02-19T08:24:06.340187Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489236180174627:2530], CA [1:7608489236180174628:2531], CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.340203Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489236180174627:2530], CA [1:7608489236180174628:2531], CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.340407Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489236180174627:2530] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1006 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 718 FinishTimeMs: 1771489446337 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 708 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489446331 UpdateTimeMs: 1771489446337 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:06.340436Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489236180174627:2530] 2026-02-19T08:24:06.340454Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489236180174628:2531], CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.340469Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489236180174628:2531], CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.340713Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. 
Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489236180174628:2531] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 940 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 675 FinishTimeMs: 1771489446337 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 663 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489446332 UpdateTimeMs: 1771489446337 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:06.340730Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489236180174628:2531] 2026-02-19T08:24:06.340746Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.340758Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489236180174629:2532], trace_id# 2026-02-19T08:24:06.341031Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489236180174629:2532] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1688 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1308 FinishTimeMs: 1771489446337 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 20 BuildCpuTimeUs: 1288 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489446333 UpdateTimeMs: 1771489446337 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:06.341047Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489236180174629:2532] 2026-02-19T08:24:06.341079Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. 
Send Commit to BufferActor buffer_actor_id# [1:7608489236180174615:2400] trace_id# 2026-02-19T08:24:06.341136Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.011032s ReadRows: 0 ReadBytes: 0 RequestUnits# 7 ForceFlag# true trace_id# 2026-02-19T08:24:06.364239Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:06.364275Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489236180174616:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzgykdbtq04axzq4fs2fz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmYxNTI4YzctMzgwNDBiY2UtOTkxZDk3ZmMtN2MyMjkyMg==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAuthorization >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> IncrementalBackup::ComplexBackupBackupCollection [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> UpsertLoad::ShouldWriteKqpUpsert >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |93.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> KqpBatchDelete::Large_2 >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:24:07.952730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:08.047563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:08.070221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:08.070292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:08.070744Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:08.307542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:08.307651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:08.386278Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489445551418 != 1771489445551422 2026-02-19T08:24:08.395704Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:08.441842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:08.533491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:08.826038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:08.839009Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:08.944135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:09.210350Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2026-02-19T08:24:09.210493Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2026-02-19T08:24:09.214777Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} started# 5 actors each with inflight# 4 2026-02-19T08:24:09.214858Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:09.214916Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:09.214944Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:09.214990Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:09.215017Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, 
parent: [1:959:2816], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:09.218485Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 1} session: ydb://session/3?node_id=1&id=OWNlMWE3NWEtOTcxODljMzQtZmYxZGU5ZjQtYzAzNmExYzM= 2026-02-19T08:24:09.220454Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 2} session: ydb://session/3?node_id=1&id=MjI1NzVkNTktNzg2ZGFkNDYtNWEwZDg0Y2ItM2Y0ZGIxMjY= 2026-02-19T08:24:09.222229Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} session: ydb://session/3?node_id=1&id=NTY2NmVlYzEtODMxMWQ3YzAtYWNiNjM3MjktYjllZjZmMzU= 2026-02-19T08:24:09.223958Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} session: ydb://session/3?node_id=1&id=OWE2NDBjMjQtYmYyNGNkMDQtNzc3OGRjZDUtOTQ2YmJkOQ== 2026-02-19T08:24:09.225917Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 5} session: ydb://session/3?node_id=1&id=Y2U1Nzk4YWQtOGFiZmIyNWQtNzZmZmJjNS0xOGRlMTM2Zg== 2026-02-19T08:24:09.229841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:972:2829], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.229958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.230014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.230089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:998:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.230167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:999:2850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.230230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1001:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.230342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.231971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.232259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:09.238129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:24:09.290646Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1015:2866] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:09.291507Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1019:2870] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:09.292196Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1021:2872] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:09.292704Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1028:2873] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:09.410550Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:09.410656Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1009:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:09.410702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1010:2861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:09.410754Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1011:2862], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:09.410793Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1012:2863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:09.445836Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1102:2930] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:09.945106Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 2} finished in 1771489449.945053s, errors=0 2026-02-19T08:24:09.945415Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1771489449945 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:09.958970Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1171:2968] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:10.020761Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} finished in 1771489450.020656s, errors=0 2026-02-19T08:24:10.020911Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1771489450020 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:10.034697Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1218:2990] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:10.101797Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} finished in 1771489450.101758s, errors=0 2026-02-19T08:24:10.102084Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1771489450101 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:10.115990Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1265:3012] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:10.180943Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 5} finished in 1771489450.180897s, errors=0 2026-02-19T08:24:10.181255Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1771489450180 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:10.194854Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1312:3034] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:10.260534Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:959:2816], subTag: 1} finished in 1771489450.260492s, errors=0 2026-02-19T08:24:10.260819Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1771489450260 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:10.260893Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} finished in 1.046294s, oks# 20, errors# 0 2026-02-19T08:24:10.261012Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:959:2816] with tag# 2 >> THealthCheckTest::BridgeNoBscResponse [GOOD] >> THealthCheckTest::CLusterNotBootstrapped >> IncrementalBackup::MultiShardIncrementalRestore [GOOD] >> IncrementalBackup::ResetOperationIncrementalBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:04.373061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:04.459321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:04.476191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:04.476267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:04.476748Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:04.730317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:04.730442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:04.811142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489441855440 != 1771489441855444 2026-02-19T08:24:04.817534Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:04.862669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:04.947704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:05.242771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:05.256550Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:05.361908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:05.636558Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2026-02-19T08:24:05.638799Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:958:2815], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-02-19T08:24:05.685321Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:958:2815], subTag: 1} TUpsertActor finished in 0.046165s, errors=0 2026-02-19T08:24:05.685929Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2026-02-19T08:24:05.686070Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [1:967:2824] with id# {Tag: 0, parent: [1:958:2815], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2026-02-19T08:24:05.687451Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [1:958:2815], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 
72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-02-19T08:24:05.687604Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:970:2827] 2026-02-19T08:24:05.687718Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 1} Bootstrap called, sample# 0 2026-02-19T08:24:05.687770Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 1} Connect to# 72075186224037888 called 2026-02-19T08:24:05.688030Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:05.694436Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 1} finished in 0.006330s, read# 1000 2026-02-19T08:24:05.694862Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:970:2827] with chunkSize# 0 finished: 0 { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 } 2026-02-19T08:24:05.695036Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:973:2830] 2026-02-19T08:24:05.695094Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 2} Bootstrap called, sample# 0 2026-02-19T08:24:05.695129Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 2} Connect to# 72075186224037888 called 2026-02-19T08:24:05.695402Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 2} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:05.922647Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 2} finished in 0.227154s, read# 1000 2026-02-19T08:24:05.922809Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:973:2830] with chunkSize# 1 finished: 0 { DurationMs: 227 OperationsOK: 1000 OperationsError: 0 } 2026-02-19T08:24:05.922927Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:976:2833] 2026-02-19T08:24:05.922972Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 3} Bootstrap called, sample# 0 2026-02-19T08:24:05.923003Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 3} Connect to# 72075186224037888 called 2026-02-19T08:24:05.923241Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 3} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:05.995226Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 3} finished in 0.071927s, read# 1000 2026-02-19T08:24:05.995385Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:976:2833] with chunkSize# 10 finished: 0 { DurationMs: 71 OperationsOK: 1000 OperationsError: 0 } 2026-02-19T08:24:05.995537Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:979:2836] 2026-02-19T08:24:05.995592Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 4} Bootstrap called, sample# 1000 2026-02-19T08:24:05.995622Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 4} Connect to# 
72075186224037888 called 2026-02-19T08:24:05.995858Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 4} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:05.998725Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 4} finished in 0.002303s, sampled# 1000, iter finished# 1, oks# 1000 2026-02-19T08:24:05.998842Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [1:958:2815], subTag: 3} received keyCount# 1000 2026-02-19T08:24:05.998990Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [1:958:2815], subTag: 3} started read actor with id# [1:982:2839] 2026-02-19T08:24:05.999050Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [1:967:2824], subTag: 5} Bootstrap called, will read keys# 1000 2026-02-19T08:24:06.441296Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [1:958:2815], subTag: 3} received point times# 1000, Inflight left# 0 2026-02-19T08:24:06.441461Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 442 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 50\n" } 2026-02-19T08:24:06.441621Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [1:958:2815], subTag: 3} finished in 0.755386s with report: { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 227 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 71 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 442 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 50\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2026-02-19T08:24:06.441928Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:967:2824] with tag# 3 ... waiting for SysViewsRoster update finished 2026-02-19T08:24:09.416106Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:09.420210Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:09.423790Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:09.424139Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:09.424190Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:09.641753Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:09.641856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:09.663066Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489446808290 != 1771489446808294 2026-02-19T08:24:09.665820Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:09.710101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:09.792205Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:10.083010Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:10.096264Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:10.199191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:10.430443Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2026-02-19T08:24:10.430728Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:958:2815], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-02-19T08:24:10.442480Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:958:2815], subTag: 1} TUpsertActor finished in 0.011519s, errors=0 2026-02-19T08:24:10.443055Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 
Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2026-02-19T08:24:10.443162Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [2:967:2824] with id# {Tag: 0, parent: [2:958:2815], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2026-02-19T08:24:10.444157Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [2:958:2815], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-02-19T08:24:10.444252Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:970:2827] 2026-02-19T08:24:10.444332Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 1} Bootstrap called, sample# 0 2026-02-19T08:24:10.444393Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 1} Connect to# 72075186224037888 called 2026-02-19T08:24:10.444641Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:10.445206Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 1} finished in 0.000520s, read# 10 2026-02-19T08:24:10.445329Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:970:2827] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2026-02-19T08:24:10.445438Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:973:2830] 2026-02-19T08:24:10.445504Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 2} Bootstrap called, sample# 0 2026-02-19T08:24:10.445523Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 2} Connect to# 72075186224037888 called 2026-02-19T08:24:10.445733Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 2} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:10.447360Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 2} finished in 0.001588s, read# 10 2026-02-19T08:24:10.447443Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:973:2830] with chunkSize# 1 finished: 0 { DurationMs: 1 OperationsOK: 10 OperationsError: 0 } 2026-02-19T08:24:10.447494Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:976:2833] 2026-02-19T08:24:10.447518Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 3} Bootstrap called, sample# 0 2026-02-19T08:24:10.447550Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 3} Connect to# 72075186224037888 called 2026-02-19T08:24:10.447747Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 3} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:10.448236Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 3} finished in 0.000458s, read# 10 2026-02-19T08:24:10.448310Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:976:2833] with 
chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2026-02-19T08:24:10.448406Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:979:2836] 2026-02-19T08:24:10.448448Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 4} Bootstrap called, sample# 10 2026-02-19T08:24:10.448470Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 4} Connect to# 72075186224037888 called 2026-02-19T08:24:10.448612Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 4} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:10.448965Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [2:967:2824], subTag: 4} finished in 0.000309s, sampled# 10, iter finished# 1, oks# 10 2026-02-19T08:24:10.449060Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [2:958:2815], subTag: 3} received keyCount# 10 2026-02-19T08:24:10.449227Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [2:958:2815], subTag: 3} started read actor with id# [2:982:2839] 2026-02-19T08:24:10.449303Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [2:967:2824], subTag: 5} Bootstrap called, will read keys# 10 2026-02-19T08:24:10.775689Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [2:958:2815], subTag: 3} received point times# 1000, Inflight left# 0 2026-02-19T08:24:10.775904Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 326 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 29\n" } 2026-02-19T08:24:10.776088Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [2:958:2815], subTag: 3} finished in 0.332755s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 1 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 326 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 29\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2026-02-19T08:24:10.776204Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:967:2824] with tag# 3 >> TKeyValueTest::TestRenameWorksNewApi [GOOD] >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] 
recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13 ... recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:54:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:54:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:52:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:52:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:54:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:54:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:82:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:85:2057] recipient: [63:84:2115] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:87:2057] recipient: [63:84:2115] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:86:2116] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:202:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:52:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:52:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:82:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:85:2057] recipient: [64:84:2115] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:87:2057] recipient: [64:84:2115] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:86:2116] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:202:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:53:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:53:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:83:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:86:2057] recipient: [65:85:2115] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:88:2057] recipient: [65:85:2115] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! !Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! new actor is[65:87:2116] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:203:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:54:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:54:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:84:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:87:2057] recipient: [66:86:2116] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:89:2057] recipient: [66:86:2116] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:88:2117] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:108:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! 
new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:109:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:54:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:54:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:88:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:91:2057] recipient: [68:90:2120] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:93:2057] recipient: [68:90:2120] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! new actor is[68:92:2121] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:208:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:52:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:52:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] !Reboot 72057594037927937 (actor [69:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:88:2057] recipient: [69:39:2086] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:91:2057] recipient: [69:90:2120] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:93:2057] recipient: [69:90:2120] !Reboot 72057594037927937 (actor [69:58:2099]) rebooted! !Reboot 72057594037927937 (actor [69:58:2099]) tablet resolver refreshed! new actor is[69:92:2121] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:208:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:53:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:59:2057] recipient: [70:53:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:76:2057] recipient: [70:14:2061] !Reboot 72057594037927937 (actor [70:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:89:2057] recipient: [70:39:2086] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:92:2057] recipient: [70:91:2120] Leader for TabletID 72057594037927937 is [70:93:2121] sender: [70:94:2057] recipient: [70:91:2120] !Reboot 72057594037927937 (actor [70:58:2099]) rebooted! !Reboot 72057594037927937 (actor [70:58:2099]) tablet resolver refreshed! 
new actor is[70:93:2121] Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:53:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:59:2057] recipient: [71:53:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:76:2057] recipient: [71:14:2061] >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> UpsertLoad::ShouldDropCreateTable [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query [GOOD] >> KqpBatchUpdate::Returning >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> IncrementalBackup::SimpleBackupRestoreWithIndex [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: 2026-02-19T08:21:52.840889Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488662865308161:2215];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:52.840948Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:52.898511Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:21:53.380322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:53.380421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:53.495985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:53.508598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:53.623139Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488662865307969:2081] 1771489312767502 != 1771489312767505 2026-02-19T08:21:53.650044Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:53.810600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:53.829435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:53.829461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2026-02-19T08:21:53.829468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:53.829561Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:53.845114Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:54.407484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:54.438924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:54.520949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.692809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:54.964468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:55.105569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.841885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488662865308161:2215];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:57.841976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:57.947835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488684340146341:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.947941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.948434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488684340146351:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.948474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.354133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.423663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.480858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.520971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.572901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.620283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.673869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.714091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:58.794659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488688635114517:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.794729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488688635114522:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.794733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.794954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488688635114524:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.794982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:58.798328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:58.809160Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488688635114525:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:58.915079Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488688635114581:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges ... cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7608489220049304169:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:02.447577Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:02.481309Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:02.611720Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:02.615948Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7608489220049304113:2081] 1771489442442608 != 1771489442442611 2026-02-19T08:24:02.638496Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:02.638637Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:02.669344Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:02.731112Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:02.746273Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:02.746307Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:02.746319Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:02.746452Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:03.452691Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:03.647947Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:03.668291Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:03.750009Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:03.984720Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:04.104942Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:07.445242Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7608489220049304169:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:07.445355Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:24:07.733750Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489241524142481:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:07.733918Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:07.734509Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489241524142490:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:07.734586Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:07.857056Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:07.902175Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:07.948036Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:07.996004Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:08.043425Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:08.094596Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:08.147061Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:08.218572Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:08.335580Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489245819110658:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:08.335670Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489245819110663:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:08.335701Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:08.335875Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489245819110665:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:08.335947Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:08.340828Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:24:08.354872Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7608489245819110666:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:24:08.414399Z node 13 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [13:7608489245819110718:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:11.145386Z node 13 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489451179, txId: 281474976710673] shutting down 2026-02-19T08:24:11.451850Z node 13 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489451487, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:06.465370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:06.583766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:06.603454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:06.603531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:06.604037Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:06.894369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:06.894503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:06.968113Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489443998195 != 1771489443998199 2026-02-19T08:24:06.975811Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:07.021019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:07.114060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:07.400840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:07.414475Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:07.519255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:07.800604Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2026-02-19T08:24:07.800747Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:958:2815], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-02-19T08:24:07.873342Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:958:2815], subTag: 2} TUpsertActor finished in 0.072175s, errors=0 2026-02-19T08:24:07.873444Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:959:2816] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-02-19T08:24:11.184737Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:11.188855Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:11.192621Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:11.192887Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:11.192929Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:11.412938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:11.413093Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:11.441537Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489448360544 != 1771489448360548 2026-02-19T08:24:11.444896Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:11.530597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:11.611971Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:11.892060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:11.905120Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:12.008701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:12.233579Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2026-02-19T08:24:12.233677Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:958:2815], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-02-19T08:24:12.289128Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:958:2815], subTag: 2} TUpsertActor finished in 0.055186s, errors=0 2026-02-19T08:24:12.289208Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:959:2816] with tag# 2 >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig 
[GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> KqpBatchPEA::PrepareState_PartitioningResolutionError >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> TConsoleTests::TestDatabaseQuotas [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> ReadLoad::ShouldReadKqpMoreThanRows >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> LocalPartition::Restarts [GOOD] >> LocalPartition::WithoutPartitionWithRestart >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:06.560052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:06.645330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:06.661484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:06.661552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:06.662134Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:06.905286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:06.905417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:06.985439Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489443895800 != 1771489443895804 2026-02-19T08:24:06.999011Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:07.045686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:07.147012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:07.418457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:07.431610Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:07.536540Z node 1 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# BrandNewTable in dir# /Root 2026-02-19T08:24:07.749135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:870:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:07.749292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:07.749624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:888:2757], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:07.749681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:07.764327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:08.097042Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2026-02-19T08:24:08.098878Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:867:2749], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2026-02-19T08:24:08.113885Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:867:2749], subTag: 1} TUpsertActor finished in 0.014672s, errors=0 2026-02-19T08:24:08.114175Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-02-19T08:24:08.114309Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:867:2749], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2026-02-19T08:24:08.171615Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:867:2749], subTag: 3} TUpsertActor finished in 0.057001s, errors=0 2026-02-19T08:24:08.171706Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:977:2827] with tag# 3 ... waiting for SysViewsRoster update finished 2026-02-19T08:24:11.240738Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:11.244564Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:11.247992Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:11.248258Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:11.248311Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:11.480125Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:11.480246Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:11.507315Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489448671749 != 1771489448671753 2026-02-19T08:24:11.511395Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:11.555511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:11.636217Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:11.911061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:11.924433Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:12.027091Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:12.244183Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2026-02-19T08:24:12.244271Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:958:2815], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 2026-02-19T08:24:12.640932Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:958:2815], subTag: 2} TUpsertActor finished in 0.396426s, errors=0 2026-02-19T08:24:12.641006Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:959:2816] with tag# 2 2026-02-19T08:24:12.646275Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:174: TLoad# 0 drops table# table in dir# /Root 2026-02-19T08:24:12.659447Z node 2 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1000:2857], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.659553Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.659846Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1011:2861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.659886Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.849519Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# table in dir# /Root 2026-02-19T08:24:12.867565Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1051:2900], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.867717Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.868115Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1055:2903], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.868224Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.880658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:12.932565Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-02-19T08:24:13.149658Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2026-02-19T08:24:13.150005Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:997:2854], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2026-02-19T08:24:13.161682Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:997:2854], subTag: 1} TUpsertActor finished in 0.011381s, errors=0 2026-02-19T08:24:13.161948Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-02-19T08:24:13.162051Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:997:2854], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2026-02-19T08:24:13.217848Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:997:2854], subTag: 3} TUpsertActor finished in 0.055592s, errors=0 2026-02-19T08:24:13.217922Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:1145:2975] with tag# 3 >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease |93.4%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> KqpBatchUpdate::TableWithAsyncIndex+EnableIndexStreamWrite >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> THealthCheckTest::ShardsLimit800 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:24:11.637582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:11.750110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:11.768576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:11.768655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:11.769173Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:12.066573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:12.066728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:12.147166Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489449204491 != 1771489449204495 2026-02-19T08:24:12.160243Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:12.206745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:12.290323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:12.570409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:12.583191Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:12.684629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:12.963873Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2026-02-19T08:24:12.963967Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2026-02-19T08:24:12.967031Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} started# 5 actors each with inflight# 4 2026-02-19T08:24:12.967089Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:12.967132Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:12.967150Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:12.967175Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:12.967197Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, 
parent: [1:959:2816], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2026-02-19T08:24:12.969812Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 1} session: ydb://session/3?node_id=1&id=ZmQ1ZGI2YzMtODcwZjM5NDctZGQwZDg4YjUtNjJlZDdlMzk= 2026-02-19T08:24:12.971186Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 2} session: ydb://session/3?node_id=1&id=MjIyMGMxMjMtZWZmN2VhNzUtNzc4NmY3OWMtNTgwMjVkMjA= 2026-02-19T08:24:12.972384Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} session: ydb://session/3?node_id=1&id=YjAwMzRiZS03MjUzMmE3My1mZTRkMTY4LWEyN2MxY2U= 2026-02-19T08:24:12.973582Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} session: ydb://session/3?node_id=1&id=NmNiNjI5YzQtNjEzY2ZkOTktN2NmZTdjYjUtZjA4OTdkYzY= 2026-02-19T08:24:12.975010Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 5} session: ydb://session/3?node_id=1&id=ZjZkYTc5ZjEtODgzN2Q4NGItNTU1NzVmMWItMWJlZmI2YzM= 2026-02-19T08:24:12.977848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:972:2829], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.977947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.977983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.978041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:998:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.978111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:999:2850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.978168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1001:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.978247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.979774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.980061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:12.985550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:24:13.047424Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1015:2866] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:13.048443Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1019:2870] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:13.049051Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1021:2872] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:13.049460Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1028:2873] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:13.166260Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.166351Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1009:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.166394Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1010:2861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.166436Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1011:2862], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.166480Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1012:2863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.200998Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1102:2930] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:13.709694Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} finished in 1771489453.709638s, errors=0 2026-02-19T08:24:13.709942Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1771489453709 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:13.723994Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1171:2968] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:13.785753Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} finished in 1771489453.785650s, errors=0 2026-02-19T08:24:13.785917Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1771489453785 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:13.798752Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1218:2990] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:13.815302Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1234:2999] txid# 281474976715675, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:13.884751Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 1} finished in 1771489453.884712s, errors=0 2026-02-19T08:24:13.885186Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1771489453884 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:13.885241Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 5} finished in 1771489453.885226s, errors=0 2026-02-19T08:24:13.885308Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1771489453885 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:13.898698Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1308:3030] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:13.960005Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:959:2816], subTag: 2} finished in 1771489453.959959s, errors=0 2026-02-19T08:24:13.960152Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1771489453959 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:13.960233Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} finished in 0.993285s, oks# 20, errors# 0 2026-02-19T08:24:13.960361Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:959:2816] with tag# 2 |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |93.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table >> KqpBatchDelete::ManyPartitions_1 >> KqpBatchPEA::ExecuteState_AbortAfterPartialCompletion >> KqpResultSetFormats::ArrowFormat_BulkUpsert-IsOlap [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> BasicUsage::KeyedWriteSession_SessionClosedDueToUserError [GOOD] >> KqpBatchDelete::TableWithSyncIndex+EnableIndexStreamWrite >> IncrementalBackup::ResetOperationIncrementalBackup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:12.776443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:12.876653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:12.894902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:12.894984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:12.895562Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:13.185826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:13.185983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:13.236601Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489450362012 != 1771489450362016 2026-02-19T08:24:13.244504Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:13.287041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:13.386342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:13.672695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:13.686184Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:13.790921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:14.078910Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2026-02-19T08:24:14.079069Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:958:2815], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-02-19T08:24:14.179901Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:958:2815], subTag: 2} TUpsertActor finished in 0.100384s, errors=0 2026-02-19T08:24:14.180000Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:959:2816] with tag# 2 >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> BasicUsage::KeyedWriteSession_NoAutoPartitioning_HashPartitionChooser >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup >> KqpBatchUpdate::SimpleOnePartition >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> THealthCheckTest::CLusterNotBootstrapped [GOOD] >> 
IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental [GOOD] >> IncrementalBackup::E2EMultipleBackupRestoreCycles >> TxUsage::WriteToTopic_Demo_12_Table [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> KqpBatchUpdate::Returning [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] >> THealthCheckTest::BridgeTimeDifference >> TxUsage::WriteToTopic_Demo_12_Query >> TConsoleTests::TestDatabaseQuotasBadStorageQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:11.978782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:12.064253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:12.078192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:12.078269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:12.078739Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:12.316345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:12.316456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:12.398072Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489449195004 != 1771489449195008 2026-02-19T08:24:12.406288Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:12.448010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:12.552317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:12.824208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:12.837613Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:12.947122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:13.225012Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2026-02-19T08:24:13.225109Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2026-02-19T08:24:13.228798Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} started# 5 actors each with inflight# 4 2026-02-19T08:24:13.228868Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-02-19T08:24:13.228928Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-02-19T08:24:13.228974Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-02-19T08:24:13.229002Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 
2026-02-19T08:24:13.229029Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-02-19T08:24:13.232515Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 1} session: ydb://session/3?node_id=1&id=NTUxNzBlMmItMjUyMjUyNzItMzM3MTgyZTktYTRmNGU0ZTc= 2026-02-19T08:24:13.234115Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 2} session: ydb://session/3?node_id=1&id=YWJiMzJhNDctYTI5Y2MwOWItODhiYmVmMzYtZmMxMWFkOGI= 2026-02-19T08:24:13.235887Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} session: ydb://session/3?node_id=1&id=ZmUxNjlmY2YtZDhhODI2NmEtZTlhMzdhZi0zZWM5OTIzYw== 2026-02-19T08:24:13.237094Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} session: ydb://session/3?node_id=1&id=ZDMyODIxNDktNTNmNTZmLTMzODA4NzU0LTViNWE2MDM4 2026-02-19T08:24:13.238526Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 5} session: ydb://session/3?node_id=1&id=NDVmMzhhOGEtNDVjMTBiZDAtNzZhYWY4YWEtNmEwMWZiYTA= 2026-02-19T08:24:13.241846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:972:2829], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.241952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.242016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.242064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:998:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.242119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:999:2850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.242158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1001:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.242269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.244110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.244438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.251520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:24:13.294334Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1015:2866] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:13.295120Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1019:2870] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:13.295727Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1021:2872] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:13.296132Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1028:2873] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:13.414606Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.414731Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1009:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.414806Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1010:2861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.414878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1011:2862], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.414927Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1012:2863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:13.449188Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1102:2930] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:13.960373Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 5} finished in 1771489453.960330s, errors=0 2026-02-19T08:24:13.960695Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1771489453960 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:13.974018Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1171:2968] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:14.040960Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 3} finished in 1771489454.040922s, errors=0 2026-02-19T08:24:14.041167Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1771489454040 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:14.055498Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1218:2990] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:14.091687Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1244:3003] txid# 281474976715676, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:14.153099Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 1} finished in 1771489454.153059s, errors=0 2026-02-19T08:24:14.153455Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1771489454153 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:14.170563Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:959:2816], subTag: 4} finished in 1771489454.170513s, errors=0 2026-02-19T08:24:14.170841Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1771489454170 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:14.184743Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1309:3031] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:14.249236Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:959:2816], subTag: 2} finished in 1771489454.249190s, errors=0 2026-02-19T08:24:14.249526Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:958:2815], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1771489454249 OperationsOK: 4 OperationsError: 0 } 2026-02-19T08:24:14.249581Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 2} finished in 1.020968s, oks# 20, errors# 0 2026-02-19T08:24:14.249741Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:959:2816] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2026-02-19T08:23:49.765045Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:49.765191Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:49.780800Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:49.780905Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:49.795792Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:49.796822Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=10498720889798149203, session=0, seqNo=0) 2026-02-19T08:23:49.797015Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:49.827717Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=10498720889798149203, session=1) 2026-02-19T08:23:49.828042Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2160], cookie=18264684759083805388, session=0, seqNo=0) 2026-02-19T08:23:49.828188Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:49.842546Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2160], cookie=18264684759083805388, session=2) 2026-02-19T08:23:49.843791Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:49.843967Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:23:49.844084Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:49.858851Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=111) 2026-02-19T08:23:49.859232Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=112, session=1, semaphore="Lock2" count=1) 2026-02-19T08:23:49.859341Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-02-19T08:23:49.859407Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" 
queue: next order #2 session 1 2026-02-19T08:23:49.873374Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=112) 2026-02-19T08:23:49.873779Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2160], cookie=222, session=2, semaphore="Lock1" count=1) 2026-02-19T08:23:49.874013Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2160], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-02-19T08:23:49.889269Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2160], cookie=222) 2026-02-19T08:23:49.889353Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2160], cookie=223) 2026-02-19T08:23:49.889737Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2160], cookie=333, session=2, semaphore="Lock1" count=1) 2026-02-19T08:23:49.890110Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2160], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2026-02-19T08:23:49.910660Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2160], cookie=333) 2026-02-19T08:23:49.910739Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2160], cookie=334) 2026-02-19T08:23:50.388534Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:50.408970Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:50.805219Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:50.818001Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:51.214368Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:51.233791Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:51.596409Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:51.618468Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:52.000145Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:52.014444Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:52.406078Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:52.420804Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:52.813159Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:52.825833Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:53.184985Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2026-02-19T08:23:53.199928Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:53.572118Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:53.587868Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:54.005287Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:54.017835Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:54.391141Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:54.406440Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:54.777304Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:54.789144Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:55.174072Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:55.194468Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:55.590141Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:55.602001Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:55.999715Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:56.012081Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:56.375139Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:56.387400Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:56.756464Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:56.768459Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:57.140295Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:57.152730Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:57.513362Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:57.525295Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:57.919696Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:57.931802Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:58.298692Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:58.314518Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:58.668473Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:58.680924Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:59.043446Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:59.055278Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:59.415125Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:59.427179Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:59.819755Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:59.831970Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:00.195784Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:00.208452Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:00.571275Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:00.583193Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:00.944202Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:00.956209Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:01.320878Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:01.334300Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:01.708909Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:30: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2026-02-19T08:24:01.709053Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2026-02-19T08:24:01.721341Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:71: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=2) 2026-02-19T ... 
50:2172], cookie=10666911075410451489, name="Sem1", limit=3) 2026-02-19T08:24:14.721347Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-02-19T08:24:14.733481Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:150:2172], cookie=10666911075410451489) 2026-02-19T08:24:14.733826Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Sem1" count=2) 2026-02-19T08:24:14.733983Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-02-19T08:24:14.734199Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-02-19T08:24:14.734281Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-02-19T08:24:14.734366Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=333, session=3, semaphore="Sem1" count=1) 2026-02-19T08:24:14.746470Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-02-19T08:24:14.746550Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-02-19T08:24:14.746580Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=333) 2026-02-19T08:24:14.747075Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:158:2180], cookie=10089550596971985598, name="Sem1") 2026-02-19T08:24:14.747163Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:158:2180], cookie=10089550596971985598) 2026-02-19T08:24:14.747620Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:161:2183], cookie=472285251160470029, name="Sem1") 2026-02-19T08:24:14.747692Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:161:2183], cookie=472285251160470029) 2026-02-19T08:24:14.747936Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=444, session=1, semaphore="Sem1" count=1) 2026-02-19T08:24:14.748042Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-02-19T08:24:14.761781Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=444) 2026-02-19T08:24:14.762366Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:166:2188], cookie=12189450460066257682, name="Sem1") 2026-02-19T08:24:14.762442Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:166:2188], cookie=12189450460066257682) 2026-02-19T08:24:14.762824Z node 4 
:KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:169:2191], cookie=8131229505443095168, name="Sem1") 2026-02-19T08:24:14.762883Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:169:2191], cookie=8131229505443095168) 2026-02-19T08:24:14.775929Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:24:14.776059Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:24:14.776591Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:24:14.777259Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:24:14.826310Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:24:14.826486Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-02-19T08:24:14.826535Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-02-19T08:24:14.826562Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-02-19T08:24:14.826917Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:208:2221], cookie=2118852521212868564, name="Sem1") 2026-02-19T08:24:14.827007Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:208:2221], cookie=2118852521212868564) 2026-02-19T08:24:14.827577Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:217:2229], cookie=6780113415449805612, name="Sem1") 2026-02-19T08:24:14.827677Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:217:2229], cookie=6780113415449805612) 2026-02-19T08:24:15.310208Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:24:15.310337Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:24:15.329951Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:24:15.330532Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:24:15.365150Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:24:15.365684Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=4796632913553415313, session=0, seqNo=0) 2026-02-19T08:24:15.365885Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:24:15.377976Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=4796632913553415313, session=1) 2026-02-19T08:24:15.378278Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=55997710889393496, session=0, seqNo=0) 2026-02-19T08:24:15.378402Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: 
[72057594037927937] Created new session 2 2026-02-19T08:24:15.390423Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=55997710889393496, session=2) 2026-02-19T08:24:15.390748Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=6725931554678059978, session=0, seqNo=0) 2026-02-19T08:24:15.390872Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2026-02-19T08:24:15.402902Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=6725931554678059978, session=3) 2026-02-19T08:24:15.403458Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=6234404085901531603, name="Sem1", limit=3) 2026-02-19T08:24:15.403603Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-02-19T08:24:15.415634Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=6234404085901531603) 2026-02-19T08:24:15.415987Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Sem1" count=2) 2026-02-19T08:24:15.416135Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-02-19T08:24:15.416347Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=2, semaphore="Sem1" count=2) 2026-02-19T08:24:15.416556Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=3, semaphore="Sem1" count=1) 2026-02-19T08:24:15.428437Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2026-02-19T08:24:15.428516Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2026-02-19T08:24:15.428549Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2026-02-19T08:24:15.429075Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2181], cookie=8468871678304682560, name="Sem1") 2026-02-19T08:24:15.429162Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2181], cookie=8468871678304682560) 2026-02-19T08:24:15.429573Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2184], cookie=13599651460809730240, name="Sem1") 2026-02-19T08:24:15.429656Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2184], cookie=13599651460809730240) 2026-02-19T08:24:15.429952Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=444, name="Sem1") 2026-02-19T08:24:15.430047Z node 5 
:KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-02-19T08:24:15.430110Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-02-19T08:24:15.430159Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-02-19T08:24:15.442168Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=444) 2026-02-19T08:24:15.442804Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:167:2189], cookie=16166037473152320535, name="Sem1") 2026-02-19T08:24:15.442898Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:167:2189], cookie=16166037473152320535) 2026-02-19T08:24:15.443344Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2192], cookie=5842284649708133639, name="Sem1") 2026-02-19T08:24:15.443415Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2192], cookie=5842284649708133639) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:11.679551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:11.798383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:11.817523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:11.817603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:11.818193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:12.113527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:12.113660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:12.188082Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489449126594 != 1771489449126598 2026-02-19T08:24:12.202095Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:12.248859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:12.340927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:12.630082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:12.643872Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:12.746478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:13.050111Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2026-02-19T08:24:13.050256Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:958:2815], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-02-19T08:24:13.146618Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:958:2815], subTag: 2} TUpsertActor finished in 0.095909s, errors=0 2026-02-19T08:24:13.146732Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:959:2816] with tag# 2 ... waiting for SysViewsRoster update finished 2026-02-19T08:24:16.673137Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:16.677099Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:16.680644Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:16.680899Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:16.680943Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:16.906009Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:16.906136Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:16.932384Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489453713693 != 1771489453713697 2026-02-19T08:24:16.935787Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:17.028249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:17.110169Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:17.374515Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:17.388553Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:17.492648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:17.719191Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2026-02-19T08:24:17.719337Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:958:2815], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2026-02-19T08:24:17.806676Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:958:2815], subTag: 2} TUpsertActor finished in 0.086965s, errors=0 2026-02-19T08:24:17.806781Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:959:2816] with tag# 2 >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_BulkUpsert-IsOlap [GOOD] Test command err: 2026-02-19T08:21:51.519213Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488659942625307:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:51.519290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:52.078222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:52.078338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:52.166826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:52.188772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:52.198218Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:52.208699Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488659942625277:2081] 1771489311511454 != 1771489311511457 2026-02-19T08:21:52.334530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:52.334568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:52.334575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:52.334659Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:52.429261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:52.557921Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:53.029101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:53.050244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:21:53.184417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:53.428596Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:53.677538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:53.792247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.010514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488681417463627:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:56.010656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:56.011083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488681417463637:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:56.011130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:56.517567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488659942625307:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:56.517632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:56.530851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.564822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.603191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.641013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.686140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.727627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.802630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:56.877750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:57.047363Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488685712431806:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.047460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.047934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488685712431811:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.047972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488685712431812:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.048073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:57.052304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:57.078271Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488685712431815:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:57.152032Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488685712431866:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280;columns=3; 2026-02-19T08:22:00.525563Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTab ... 224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.654087Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.654173Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.654185Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.654216Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.654216Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.671408Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.671408Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.671484Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.671491Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.671534Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.671534Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.689418Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.689418Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.689490Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.689495Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.689524Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.689524Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.706659Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.706755Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-02-19T08:24:01.706792Z node 12 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=42;result=not_found; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1888;columns=25; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1888;columns=25; 2026-02-19T08:24:07.846471Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7608489243302253453:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:07.846671Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:07.867886Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:07.992065Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 
2026-02-19T08:24:07.998133Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7608489243302253415:2081] 1771489447844660 != 1771489447844663 2026-02-19T08:24:08.015950Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:08.016098Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:08.045247Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:08.098384Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:08.119233Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:08.119269Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:08.119281Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:08.119419Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:08.856099Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:08.954397Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:12.846966Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7608489243302253453:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:12.847102Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:24:13.888915Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489269072058096:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.888924Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489269072058084:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.889042Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.889376Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7608489269072058104:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.889490Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:13.893576Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:24:13.905335Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7608489269072058103:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:24:13.987007Z node 13 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [13:7608489269072058156:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:14.070104Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2152;columns=28; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2152;columns=28; |93.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:13.314224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:13.431556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:13.447845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:13.447910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:13.448355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:13.678538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:13.678667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:13.736533Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489450777460 != 1771489450777464 2026-02-19T08:24:13.748150Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:13.789616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:13.887123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:14.176887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:14.190857Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:14.295771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:14.565116Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-02-19T08:24:14.565278Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:958:2815], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-02-19T08:24:14.624054Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:958:2815], subTag: 2} TUpsertActor finished in 0.058348s, errors=0 2026-02-19T08:24:14.624168Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:959:2816] with tag# 2 ... waiting for SysViewsRoster update finished 2026-02-19T08:24:17.607728Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:17.612630Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:17.616714Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:17.616974Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:17.617015Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:17.863008Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:17.863128Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:17.889121Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489455190526 != 1771489455190530 2026-02-19T08:24:17.892312Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:17.938014Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:18.018405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:18.316626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:18.329473Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:18.431508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:18.646947Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-02-19T08:24:18.647075Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:958:2815], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2026-02-19T08:24:18.704626Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:958:2815], subTag: 2} TUpsertActor finished in 0.057192s, errors=0 2026-02-19T08:24:18.704732Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:959:2816] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> 
TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:23:25.171503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:23:25.171596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:25.171671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:23:25.171713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:23:25.171753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:23:25.171801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:23:25.171840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:23:25.171881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:23:25.171963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:25.172042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:23:25.172891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:25.173328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:23:25.268974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:25.269029Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:25.284972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:23:25.285792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:23:25.286000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:23:25.298244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:23:25.299021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:23:25.299817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:25.300171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:23:25.303747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:25.303962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:23:25.305358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:25.305431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:25.305572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:23:25.305630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:25.305683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:23:25.305910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:23:25.463099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 
281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.464997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.465060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:25.465159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... te::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2026-02-19T08:24:13.484229Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2026-02-19T08:24:13.484273Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2026-02-19T08:24:13.484350Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2026-02-19T08:24:13.484388Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2026-02-19T08:24:13.484421Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2026-02-19T08:24:13.484446Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2026-02-19T08:24:13.484475Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2026-02-19T08:24:13.485020Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-02-19T08:24:13.485276Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-02-19T08:24:13.485351Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 
2026-02-19T08:24:13.485421Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], version: 5 2026-02-19T08:24:13.485507Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 4 2026-02-19T08:24:13.486793Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-02-19T08:24:13.486882Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-02-19T08:24:13.486913Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2026-02-19T08:24:13.486961Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 5 2026-02-19T08:24:13.486994Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 6 2026-02-19T08:24:13.487080Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2026-02-19T08:24:13.492624Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2026-02-19T08:24:13.493829Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2026-02-19T08:24:13.506800Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1418 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2026-02-19T08:24:13.506867Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2026-02-19T08:24:13.507089Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1418 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2026-02-19T08:24:13.507281Z node 19 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1418 } } CommitVersion { Step: 400 TxId: 281474976715657 } debug: NTableState::TProposedWaitParts operationId# 281474976715657:1 2026-02-19T08:24:13.508503Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 1261 RawX2: 81604381770 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-02-19T08:24:13.508590Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2026-02-19T08:24:13.508840Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 1261 RawX2: 81604381770 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-02-19T08:24:13.508958Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 2026-02-19T08:24:13.509161Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 1261 RawX2: 81604381770 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-02-19T08:24:13.509284Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2026-02-19T08:24:13.509360Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-02-19T08:24:13.509452Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2026-02-19T08:24:13.509531Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:1 ProposedWaitParts -> Done 2026-02-19T08:24:13.513353Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-02-19T08:24:13.514854Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-02-19T08:24:13.515439Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-02-19T08:24:13.515512Z node 19 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common.cpp:499: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2026-02-19T08:24:13.515730Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2026-02-19T08:24:13.515808Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-02-19T08:24:13.515892Z node 19 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2026-02-19T08:24:13.515960Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-02-19T08:24:13.516030Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2026-02-19T08:24:13.516093Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-02-19T08:24:13.516158Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-02-19T08:24:13.516250Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976715657:0 2026-02-19T08:24:13.516370Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 3 2026-02-19T08:24:13.516423Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:1 2026-02-19T08:24:13.516449Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976715657:1 2026-02-19T08:24:13.516537Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 5 2026-02-19T08:24:13.516572Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:2 2026-02-19T08:24:13.516593Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976715657:2 2026-02-19T08:24:13.516621Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning [GOOD] Test command err: 2026-02-19T08:24:13.972911Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489266352839683:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:13.973440Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:14.177726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:14.177836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-02-19T08:24:14.206736Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:14.225857Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:14.227197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:14.227689Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489266352839652:2081] 1771489453971240 != 1771489453971243 2026-02-19T08:24:14.316734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:14.316772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:14.316778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:14.316890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:14.483717Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:16.705749Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006d7/r3tmp/spilling-tmp-runner/node_1_5125988a-d03dd720-2c179409-b623e7cf, actor: [1:7608489279237743377:2384] 2026-02-19T08:24:16.706109Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006d7/r3tmp/spilling-tmp-runner 2026-02-19T08:24:18.556255Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556320Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556342Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556362Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556389Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556420Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556444Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556478Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556512Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556604Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556679Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:18.556807Z node 1 
:KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:18.556974Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:18.557000Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:18.557289Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:18.557450Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557519Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557610Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557679Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557753Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557800Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557840Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557891Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557943Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:18.557994Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... 1:7608489287827679057:2528], CA [1:7608489287827679058:2529], CA [1:7608489287827679059:2530], CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.587164Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [1:7608489287827679056:2527], CA [1:7608489287827679057:2528], CA [1:7608489287827679058:2529], CA [1:7608489287827679059:2530], CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.587409Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489287827679056:2527] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 761 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 471 FinishTimeMs: 1771489458584 EgressBytes: 495 EgressRows: 9 ComputeCpuTimeUs: 13 BuildCpuTimeUs: 458 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489458566 UpdateTimeMs: 1771489458584 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:18.587432Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489287827679056:2527] 2026-02-19T08:24:18.587460Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [1:7608489287827679057:2528], CA [1:7608489287827679058:2529], CA [1:7608489287827679059:2530], CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.587481Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489287827679057:2528], CA [1:7608489287827679058:2529], CA [1:7608489287827679059:2530], CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.587711Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489287827679057:2528] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1504 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 420 FinishTimeMs: 1771489458584 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 15 BuildCpuTimeUs: 405 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489458567 UpdateTimeMs: 1771489458584 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:18.587732Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489287827679057:2528] 2026-02-19T08:24:18.587755Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489287827679058:2529], CA [1:7608489287827679059:2530], CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.587777Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489287827679058:2529], CA [1:7608489287827679059:2530], CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.587962Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489287827679058:2529] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1273 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 944 FinishTimeMs: 1771489458584 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 14 BuildCpuTimeUs: 930 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489458569 UpdateTimeMs: 1771489458584 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:18.587980Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489287827679058:2529] 2026-02-19T08:24:18.587996Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489287827679059:2530], CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.588008Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489287827679059:2530], CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.588200Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489287827679059:2530] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1332 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 961 FinishTimeMs: 1771489458584 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 949 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489458570 UpdateTimeMs: 1771489458584 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:18.588216Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489287827679059:2530] 2026-02-19T08:24:18.588228Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.588240Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. 
Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489287827679060:2531], trace_id# 2026-02-19T08:24:18.588496Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489287827679060:2531] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2133 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1629 FinishTimeMs: 1771489458585 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 24 BuildCpuTimeUs: 1605 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489458571 UpdateTimeMs: 1771489458585 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:18.588518Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489287827679060:2531] 2026-02-19T08:24:18.588551Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:7608489287827679046:2400] trace_id# 2026-02-19T08:24:18.588579Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.012237s ReadRows: 0 ReadBytes: 0 RequestUnits# 8 ForceFlag# true trace_id# 2026-02-19T08:24:18.609342Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:18.609369Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489287827679047:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzx52ah1w16h1s6mvp612, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZiMzg0NzctMzUzMWUyMjMtOTY5OTdiZDEtMmE5MjQ5MDI=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:23:42.521002Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:42.522158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:42.526384Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:23:42.650709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:42.653311Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:42.662171Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:303:2226], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:42.662520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:42.662943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:42.664974Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:42.665162Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:43.125226Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:43.271647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:43.271790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:43.272189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:43.272247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:43.345705Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:23:43.346476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:43.346953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:43.373546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:43.493230Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:44.203825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:44.282508Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:44.282641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:44.646161Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:228:2184] Handle TEvProposeTransaction 2026-02-19T08:23:44.646231Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:228:2184] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:23:44.646361Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:228:2184] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1576:3028] 2026-02-19T08:23:44.803509Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:1576:3028] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:23:44.803607Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:1576:3028] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:23:44.804392Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:1576:3028] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:23:44.804482Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:1576:3028] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:23:44.804896Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:1576:3028] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:23:44.805044Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:1576:3028] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:23:44.805219Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1576:3028] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:23:44.807545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:44.808118Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:1576:3028] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:23:44.816046Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:1576:3028] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:23:44.816155Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:1576:3028] txid# 281474976710657 SEND to# [1:1504:2986] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:23:44.921297Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1653:3085] 2026-02-19T08:23:44.921571Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:23:44.984597Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1655:3086] 2026-02-19T08:23:44.984803Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:23:44.999408Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:23:44.999616Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:23:45.001904Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:23:45.001989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:23:45.002029Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:23:45.002397Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:23:45.002844Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:23:45.002922Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:1745:3085] in generation 1 2026-02-19T08:23:45.018457Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1657:3087] 2026-02-19T08:23:45.018700Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:23:45.028673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:23:45.028988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:23:45.030686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2026-02-19T08:23:45.030758Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037894 2026-02-19T08:23:45.030822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037894 2026-02-19T08:23:45.031138Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:23:45.032002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:23:45.032074Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037894 persisting started state actor id [1:1766:3086] in generation 1 2026-02-19T08:23:45.044992Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:23:45.050105Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:23:45.051420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2026-02-19T08:23:45.051490Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037892 2026-02-19T08:23:45.051548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037892 2026-02-19T08:23:45.051882Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:23:45.052099Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:23:45.052176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037892 persisting started state actor id [1:1783:3087] in generation 1 2026-02-19T08:23:45.092300Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1749:2411] 2026-02-19T08:23:45.092569Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:23:45.142238Z node 2 :TX_DATASHARD INFO ... 0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll sources 2026-02-19T08:24:18.081758Z node 5 :KQP_COMPUTE TRACE: dq_sync_compute_actor_base.h:37: SelfId: [5:2198:3394], TxId: 281474976715665, task: 4. Ctx: { TraceId : 01khtfzv0g7q0fcqdnp7xq4dq1. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Resume execution, run status: Finished 2026-02-19T08:24:18.081800Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:405: SelfId: [5:2198:3394], TxId: 281474976715665, task: 4. Ctx: { TraceId : 01khtfzv0g7q0fcqdnp7xq4dq1. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. ProcessOutputsState.Inflight: 0 2026-02-19T08:24:18.081843Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [5:2198:3394], TxId: 281474976715665, task: 4. Ctx: { TraceId : 01khtfzv0g7q0fcqdnp7xq4dq1. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:24:18.081951Z node 5 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715665;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:24:18.084870Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715665, taskId: 4. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-02-19T08:24:18.085524Z node 6 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:06.009000Z, after 1.510488s 2026-02-19T08:24:18.085603Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:06.009000Z, after 1.510488s 2026-02-19T08:24:18.086554Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2198:3394] TaskId# 4 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 349795 Tasks { TaskId: 4 StageId: 2 CpuTimeUs: 348569 FinishTimeMs: 1771489458076 InputRows: 1 InputBytes: 7 ComputeCpuTimeUs: 98 BuildCpuTimeUs: 348471 HostName: "ghrun-fkrokuvqke" NodeId: 5 StartTimeMs: 1771489458076 CreateTimeMs: 1771489456569 UpdateTimeMs: 1771489458076 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:18.086626Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2198:3394], CA [5:2197:3393], CA [6:2200:2525], CA [5:2196:3392], trace_id# ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } format: FORMAT_VALUE } SeqNo: 1 QueryResultIndex: 0 ChannelId: 4 VirtualTimestamp { Step: 3500 TxId: 281474976715663 } Finished: true 2026-02-19T08:24:18.089207Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2196:3392] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 264144 DurationUs: 7000 Tasks { TaskId: 1 CpuTimeUs: 260932 FinishTimeMs: 1771489458077 OutputRows: 1 OutputBytes: 7 Tables { TablePath: "/Root/table-1" ReadRows: 54 ReadBytes: 432 } ComputeCpuTimeUs: 113 BuildCpuTimeUs: 260819 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-fkrokuvqke" NodeId: 5 StartTimeMs: 1771489458070 CreateTimeMs: 1771489456237 UpdateTimeMs: 1771489458077 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:18.089287Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2196:3392] 2026-02-19T08:24:18.089361Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:2188:3253] TxId: 281474976715665. 
Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2198:3394], CA [5:2197:3393], CA [6:2200:2525], trace_id# 2026-02-19T08:24:18.089725Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2197:3393] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 74437 DurationUs: 13000 Tasks { TaskId: 3 StageId: 1 CpuTimeUs: 70848 FinishTimeMs: 1771489458079 InputRows: 2 InputBytes: 14 OutputRows: 1 OutputBytes: 7 ComputeCpuTimeUs: 190 BuildCpuTimeUs: 70658 WaitOutputTimeUs: 3193 HostName: "ghrun-fkrokuvqke" NodeId: 5 StartTimeMs: 1771489458066 CreateTimeMs: 1771489456498 UpdateTimeMs: 1771489458079 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:18.089771Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2197:3393] 2026-02-19T08:24:18.089821Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2198:3394], CA [6:2200:2525], trace_id# 2026-02-19T08:24:18.089965Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2198:3394] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 350458 DurationUs: 5000 Tasks { TaskId: 4 StageId: 2 CpuTimeUs: 348573 FinishTimeMs: 1771489458081 InputRows: 1 InputBytes: 7 OutputRows: 1 OutputBytes: 7 ResultRows: 1 ResultBytes: 7 ComputeCpuTimeUs: 102 BuildCpuTimeUs: 348471 HostName: "ghrun-fkrokuvqke" NodeId: 5 StartTimeMs: 1771489458076 CreateTimeMs: 1771489456569 UpdateTimeMs: 1771489458081 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:18.090006Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2198:3394] 2026-02-19T08:24:18.090044Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Waiting for: CA [6:2200:2525], trace_id# 2026-02-19T08:24:18.090710Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [6:2200:2525] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 251315 DurationUs: 14000 Tasks { TaskId: 2 CpuTimeUs: 249008 FinishTimeMs: 1771489458072 OutputRows: 1 OutputBytes: 7 Tables { TablePath: "/Root/table-1" ReadRows: 46 ReadBytes: 368 } ComputeCpuTimeUs: 152 BuildCpuTimeUs: 248856 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-fkrokuvqke" NodeId: 6 StartTimeMs: 1771489458058 CreateTimeMs: 1771489456921 UpdateTimeMs: 1771489458072 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:18.090762Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [6:2200:2525] 2026-02-19T08:24:18.091144Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:18.091221Z node 5 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:18.091305Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [5:2188:3253] TxId: 281474976715665. Ctx: { TraceId: 01khtfzv0g7q0fcqdnp7xq4dq1, Database: , SessionId: ydb://session/3?node_id=5&id=NzhiY2EwZWUtMjQyMDE0NzktZjIyZGQxN2UtYmQ4ZWMwZWU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.008709s ReadRows: 100 ReadBytes: 800 RequestUnits# 100 ForceFlag# true trace_id# 2026-02-19T08:24:18.091408Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 4000 2026-02-19T08:24:18.091480Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2830: CheckMediatorStateRestored at 72075186224037890: waitStep# 4000 readStep# 4000 observedStep# 4000 ... 
response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" EffectivePoolId: "default" } YdbStatus: SUCCESS ConsumedRu: 224 >> KqpBatchUpdate::TableNotExists >> KqpBatchPEA::PrepareState_PartitioningResolutionError [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query >> KqpBatchUpdate::TableWithAsyncIndex+EnableIndexStreamWrite [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates >> KqpBatchDelete::TableWithUniqueSyncIndex+EnableIndexStreamWrite >> TKesusTest::TestAcquireLocks [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> KqpBatchDelete::ManyPartitions_3 >> KqpBatchPEA::PrepareState_AbortExecution >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> KqpBatchPEA::ExecuteState_ChildExecuterAbort >> KqpBatchPEA::ExecuteState_AbortAfterPartialCompletion [GOOD] >> KqpBatchDelete::TableWithSyncIndex+EnableIndexStreamWrite [GOOD] >> KqpBatchDelete::UnknownColumn >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates >> TKesusTest::TestAcquireDowngrade >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental >> KqpBatchPEA::AbortState_UnknownEvent >> KqpBatchDelete::TableWithSyncIndex-EnableIndexStreamWrite >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> KqpBatchUpdate::TableWithUniqueSyncIndex+EnableIndexStreamWrite >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2026-02-19T08:22:52.906940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:52.907000Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:53.126473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:54.126384Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:54.126973Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:54.128891Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8290055791858666905 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:54.274061Z node 7 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:22:54.312848Z node 7 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:22:54.313028Z node 7 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:22:54.422569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:22:54.648049Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:54.648639Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:54.648935Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpWN32Ua/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16884792448258528229 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunk ... Cfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:24:11.491313Z node 140 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:24:11.491772Z node 140 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:24:11.491927Z node 140 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16404011750052717941 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:24:11.523611Z node 138 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:24:11.524114Z node 138 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:24:11.524250Z node 138 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14314205025589643726 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:24:11.526717Z node 138 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 
1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:24:11.862575Z node 136 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:11.862666Z node 136 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:11.875212Z node 136 :STATISTICS WARN: tx_init.cpp:292: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2026-02-19T08:24:12.049151Z node 142 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:24:12.049607Z node 142 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:24:12.049908Z node 142 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/00370d/r3tmp/tmpR0WDGh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6278987995859871351 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:24:15.213920Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:15.214018Z node 145 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:15.316872Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:18.852761Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:18.852870Z node 154 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:18.991713Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |93.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithAsyncIndex+EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:24:15.668566Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489275656590413:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:15.669316Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:15.870876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:15.871006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:15.899891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:15.920478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:15.959610Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489275656590365:2081] 1771489455664422 != 1771489455664425 2026-02-19T08:24:15.964817Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:16.016032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:16.016053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:16.016081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:16.016160Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:16.070636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:18.473017Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006d4/r3tmp/spilling-tmp-runner/node_1_e1372977-5b23a5e6-6f706f45-88a4030a, actor: [1:7608489288541493109:2311] 2026-02-19T08:24:18.473221Z node 1 :KQP_COMPUTE INFO: 
spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006d4/r3tmp/spilling-tmp-runner 2026-02-19T08:24:19.157631Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:19.157698Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:19.157913Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:19.157948Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:19.158137Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 1 2026-02-19T08:24:19.158303Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:19.158485Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"v1" (Just (String '"123")))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct '('"k" (Int32 '1)) $1 $2 $3)) (let $5 (AsStruct '('"k" (Int32 '"2")) $1 $2 $3)) (let $6 (AsStruct '('"k" (Int32 '"3")) $1 $2 $3)) (let $7 (AsStruct '('"k" (Int32 '"4")) $1 $2 $3)) (let $8 (AsStruct '('"k" (Int32 '"5")) $1 $2 $3)) (return (Iterator (AsList $4 $5 $6 $7 $8))) )))) ) 2026-02-19T08:24:19.158521Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:19.158743Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:19.159166Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. 
Collect channels updates for task: 1 at actor [1:7608489292836460728:2325] 2026-02-19T08:24:19.159225Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489292836460728:2325], channels: 0 2026-02-19T08:24:19.159341Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:19.159376Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:19.159402Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489292836460728:2325] 2026-02-19T08:24:19.159439Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489292836460728:2325], channels: 0 2026-02-19T08:24:19.159527Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489292836460728:2325], trace_id# 2026-02-19T08:24:19.159583Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489292836460728:2325], trace_id# 2026-02-19T08:24:19.159626Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:19.172255Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489292836460728:2325] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:19.172329Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489292836460728:2325], trace_id# 2026-02-19T08:24:19.172375Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489292836460728:2325], trace_id# 2026-02-19T08:24:19.173329Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489292836460725:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489292836460728:2325] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2296 Tasks { TaskId: 1 CpuTimeUs: 1484 FinishTimeMs: 1771489459172 EgressBytes: 105 EgressRows: 5 ComputeCpuTimeUs: 36 BuildCpuTimeUs: 1448 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489459159 UpdateTimeMs: 1771489459172 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:19.173394Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710661. Ctx: { TraceId: 01khtfzyt6ezwxf9zjn937edgv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Compute ac ... 00Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489301426395604:2445], channels: 2 2026-02-19T08:24:21.488112Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489301426395603:2444], channels: 1 2026-02-19T08:24:21.488157Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. 
Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# true trace_id# 2026-02-19T08:24:21.488172Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:21.488184Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [1:7608489301426395604:2445] 2026-02-19T08:24:21.488204Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [1:7608489301426395603:2444] 2026-02-19T08:24:21.488231Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489301426395604:2445], channels: 2 2026-02-19T08:24:21.488246Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489301426395603:2444], channels: 1 2026-02-19T08:24:21.488280Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489301426395604:2445], CA [1:7608489301426395603:2444], trace_id# 2026-02-19T08:24:21.488307Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489301426395604:2445], CA [1:7608489301426395603:2444], trace_id# 2026-02-19T08:24:21.488328Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:21.489310Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489301426395603:2444] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:21.489353Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489301426395604:2445], CA [1:7608489301426395603:2444], trace_id# 2026-02-19T08:24:21.489375Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489301426395604:2445], CA [1:7608489301426395603:2444], trace_id# 2026-02-19T08:24:21.489404Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489301426395604:2445] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:21.489420Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489301426395604:2445], CA [1:7608489301426395603:2444], trace_id# 2026-02-19T08:24:21.489439Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489301426395604:2445], CA [1:7608489301426395603:2444], trace_id# 2026-02-19T08:24:21.490558Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489301426395603:2444] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1280 Tasks { TaskId: 1 CpuTimeUs: 824 FinishTimeMs: 1771489461490 OutputBytes: 1 Tables { TablePath: "/Root/global_async_idx/idx/indexImplTable" ReadRows: 5 ReadBytes: 40 AffectedPartitions: 1 } IngressRows: 5 ComputeCpuTimeUs: 85 BuildCpuTimeUs: 739 WaitOutputTimeUs: 15 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489461490 CreateTimeMs: 1771489461488 UpdateTimeMs: 1771489461490 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:21.490607Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489301426395603:2444] 2026-02-19T08:24:21.490633Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489301426395604:2445], trace_id# 2026-02-19T08:24:21.490653Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489301426395604:2445], trace_id# 2026-02-19T08:24:21.490726Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489301426395604:2445] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 808 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 186 FinishTimeMs: 1771489461490 InputBytes: 1 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 76 BuildCpuTimeUs: 110 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489461490 CreateTimeMs: 1771489461488 UpdateTimeMs: 1771489461490 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:21.490746Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489301426395604:2445] 2026-02-19T08:24:21.490817Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:21.490832Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. 
Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:21.490848Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489301426395599:2325] TxId: 281474976710673. Ctx: { TraceId: 01khtg00wt6b9dv7ysd65s1k7k, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTk4ZGQ0OGUtYTZlMGNlMGItZGY1MzdlYjAtODZlNzIyZTg=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002088s ReadRows: 5 ReadBytes: 40 RequestUnits# 5 ForceFlag# true trace_id# >> IncrementalBackup::ReplaceIntoIncrementalBackup [GOOD] >> IncrementalBackup::ResetVsUpsertColumnStateSerialization >> TxUsage::WriteToTopic_Demo_42_Query [GOOD] >> KqpBatchUpdate::Large_2 [GOOD] |93.4%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault |93.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> KqpBatchUpdate::UpdateOn |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 [GOOD] Test command err: 2026-02-19T08:23:59.449536Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489208303273841:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:59.449604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:59.675742Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:59.703763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:59.703862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:59.747956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:59.777329Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:59.780102Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489208303273810:2081] 1771489439447965 != 1771489439447968 2026-02-19T08:23:59.861157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:59.861176Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:59.861189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:59.861300Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:59.938285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:02.225499Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006db/r3tmp/spilling-tmp-runner/node_1_b9c060f9-9898ef17-845f3bc9-4345de4b, actor: [1:7608489221188177547:2384] 2026-02-19T08:24:02.226188Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006db/r3tmp/spilling-tmp-runner 2026-02-19T08:24:04.180186Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180231Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180247Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180261Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180296Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180320Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180340Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180363Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180383Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180402Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180430Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:04.180467Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtfzf16dryqz650hmpjxb2h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTY4YjIzMzAtNjY1NmRiZTItNmQ4OGU1Y2ItNmNhZjQ0NWY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:04.180649Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489229778113214:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzf16dryqz650hmpjxb2h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTY4YjIzMzAtNjY1NmRiZTItNmQ4OGU1Y2ItNmNhZjQ0NWY=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:04.180672Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489229778113214:2400] TxId: 281474976710672. 
Ctx: { TraceId: 01khtfzf16dryqz650hmpjxb2h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTY4YjIzMzAtNjY1NmRiZTItNmQ4OGU1Y2ItNmNhZjQ0NWY=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:04.180907Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:04.181023Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181064Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181095Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181123Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181164Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181194Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181222Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181256Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181291Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:04.181339Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... :24:23.146936Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489312473907656:2799], CA [2:7608489312473907653:2796], CA [2:7608489312473907654:2797], CA [2:7608489312473907655:2798], trace_id# 2026-02-19T08:24:23.146976Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:7608489312473907656:2799], CA [2:7608489312473907653:2796], CA [2:7608489312473907654:2797], CA [2:7608489312473907655:2798], trace_id# 2026-02-19T08:24:23.147098Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489312473907654:2797] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 21845 DurationUs: 24000 Tasks { TaskId: 3 CpuTimeUs: 12713 FinishTimeMs: 1771489463146 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 10000 ReadBytes: 80000 AffectedPartitions: 2 } IngressRows: 10000 ComputeCpuTimeUs: 12654 BuildCpuTimeUs: 59 WaitOutputTimeUs: 6901 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489463122 CreateTimeMs: 1771489463066 UpdateTimeMs: 1771489463146 } MaxMemoryUsage: 2097152 } trace_id# 2026-02-19T08:24:23.147146Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489312473907654:2797] 2026-02-19T08:24:23.147192Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489312473907656:2799], CA [2:7608489312473907653:2796], CA [2:7608489312473907655:2798], trace_id# 2026-02-19T08:24:23.147236Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. 
Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:7608489312473907656:2799], CA [2:7608489312473907653:2796], CA [2:7608489312473907655:2798], trace_id# 2026-02-19T08:24:23.147714Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489312473907653:2796] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 24288 DurationUs: 18000 Tasks { TaskId: 2 CpuTimeUs: 14057 FinishTimeMs: 1771489463147 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 10000 ReadBytes: 80000 AffectedPartitions: 2 } IngressRows: 10000 ComputeCpuTimeUs: 13979 BuildCpuTimeUs: 78 WaitOutputTimeUs: 6821 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489463129 CreateTimeMs: 1771489463066 UpdateTimeMs: 1771489463147 } MaxMemoryUsage: 2097152 } trace_id# 2026-02-19T08:24:23.147756Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489312473907653:2796] 2026-02-19T08:24:23.147789Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489312473907656:2799], CA [2:7608489312473907655:2798], trace_id# 2026-02-19T08:24:23.147829Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489312473907656:2799], CA [2:7608489312473907655:2798], trace_id# 2026-02-19T08:24:23.159282Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489312473907655:2798] TaskId# 4 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 284 DurationUs: 11000 Tasks { TaskId: 4 CpuTimeUs: 12415 FinishTimeMs: 1771489463158 IngressRows: 10000 ComputeCpuTimeUs: 12358 BuildCpuTimeUs: 57 WaitOutputTimeUs: 6646 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489463147 CreateTimeMs: 1771489463066 UpdateTimeMs: 1771489463158 } MaxMemoryUsage: 2097152 } trace_id# 2026-02-19T08:24:23.159348Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489312473907656:2799], CA [2:7608489312473907655:2798], trace_id# 2026-02-19T08:24:23.159389Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489312473907656:2799], CA [2:7608489312473907655:2798], trace_id# 2026-02-19T08:24:23.159806Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489312473907655:2798] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 20876 DurationUs: 12000 Tasks { TaskId: 4 CpuTimeUs: 12420 FinishTimeMs: 1771489463159 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 10000 ReadBytes: 80000 AffectedPartitions: 2 } IngressRows: 10000 ComputeCpuTimeUs: 12363 BuildCpuTimeUs: 57 WaitOutputTimeUs: 6646 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489463147 CreateTimeMs: 1771489463066 UpdateTimeMs: 1771489463159 } MaxMemoryUsage: 2097152 } trace_id# 2026-02-19T08:24:23.159862Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489312473907655:2798] 2026-02-19T08:24:23.159901Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489312473907656:2799], trace_id# 2026-02-19T08:24:23.159943Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7608489312473907656:2799], trace_id# 2026-02-19T08:24:23.160043Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489312473907656:2799] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 46585 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 383 FinishTimeMs: 1771489463159 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 258 BuildCpuTimeUs: 125 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489463159 CreateTimeMs: 1771489463066 UpdateTimeMs: 1771489463159 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:23.160064Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489312473907656:2799] 2026-02-19T08:24:23.160163Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:23.160190Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:23.160218Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489312473907646:2535] TxId: 281474976710769. Ctx: { TraceId: 01khtg02aj8entr3fkdjtjsmkr, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc3MzQxZmUtZjdhOThhY2ItYzNlZjAxM2MtZmNkYjEzMjI=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.133615s ReadRows: 38000 ReadBytes: 304000 RequestUnits# 38000 ForceFlag# true trace_id# >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates |93.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large >> THealthCheckTest::ShardsLimit800 [GOOD] |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |93.4%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut >> KqpBatchUpdate::TableNotExists [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:07.245122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:07.352957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:07.369609Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:07.369670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:07.370091Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:07.589521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:07.589629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:07.647026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489444832143 != 1771489444832147 2026-02-19T08:24:07.653740Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:07.697267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:07.798675Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:08.081229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:08.094930Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:08.199827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:08.488658Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2026-02-19T08:24:08.490569Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:958:2815], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-02-19T08:24:08.506132Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:958:2815], subTag: 1} TUpsertActor finished in 0.015216s, errors=0 2026-02-19T08:24:08.506413Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2026-02-19T08:24:08.506512Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:322: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2026-02-19T08:24:08.507522Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:367: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 
2026-02-19T08:24:08.507635Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:401: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 3} started fullscan actor# [1:970:2827] 2026-02-19T08:24:08.507721Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 1} Bootstrap called, sample# 100 2026-02-19T08:24:08.507752Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 1} Connect to# 72075186224037888 called 2026-02-19T08:24:08.507995Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-02-19T08:24:08.508800Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:967:2824], subTag: 1} finished in 0.000691s, sampled# 100, iter finished# 1, oks# 100 2026-02-19T08:24:08.508914Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:417: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 3} received keyCount# 100 2026-02-19T08:24:08.509136Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:446: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:958:2815], subTag: 3} started# 10 actors each with inflight# 1 2026-02-19T08:24:08.509194Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 2} Bootstrap called 2026-02-19T08:24:08.509238Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509301Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 3} Bootstrap called 2026-02-19T08:24:08.509327Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509354Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 4} Bootstrap called 2026-02-19T08:24:08.509391Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509430Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 5} Bootstrap called 2026-02-19T08:24:08.509454Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509480Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 6} Bootstrap called 2026-02-19T08:24:08.509501Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509530Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 7} Bootstrap called 2026-02-19T08:24:08.509560Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509587Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 8} Bootstrap called 
2026-02-19T08:24:08.509606Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509624Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 9} Bootstrap called 2026-02-19T08:24:08.509637Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509657Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 10} Bootstrap called 2026-02-19T08:24:08.509677Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.509696Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 11} Bootstrap called 2026-02-19T08:24:08.509731Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-02-19T08:24:08.511535Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 2} session: ydb://session/3?node_id=1&id=MjFhYTNiMWEtOWNmYjE1Y2EtZjgxNTg0MGUtMzQ2ZGFhZGQ= 2026-02-19T08:24:08.514323Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 3} session: ydb://session/3?node_id=1&id=MzY3N2RiNDktMmE4YWY5NzgtY2VlYjJjZjYtYmFhMDJiM2I= 2026-02-19T08:24:08.514411Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 4} session: ydb://session/3?node_id=1&id=YmUwNzNhMTEtNzU4OTNhY2YtMWM2ZGEyZjAtZTJhMTBiZjE= 2026-02-19T08:24:08.517047Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 5} session: ydb://session/3?node_id=1&id=ZTZhOTZmMDUtM2RhM2JiMjAtOGZiOGVjYjgtZjZlNDJkNTc= 2026-02-19T08:24:08.517153Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 6} session: ydb://session/3?node_id=1&id=OTQyMzE3NDktNzJjMWNmNTgtNWJkZGQxY2UtYTI3NzI0Mzk= 2026-02-19T08:24:08.519270Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 7} session: ydb://session/3?node_id=1&id=YTBiZWI4NGMtNjcwMjRjOGUtNTRjNjhkYjEtNzBiMDI0MTY= 2026-02-19T08:24:08.521600Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 8} session: ydb://session/3?node_id=1&id=ZDg3NDk3MmQtYjRiNWM4M2QtMzg2NWMwN2YtYTBlYWQ1OWE= 2026-02-19T08:24:08.523547Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 9} session: ydb://session/3?node_id=1&id=N2U2ZjhjM2UtZGNkMDVhZjUtOTcyZDlmYmMtNTVkNmEzNTc= 2026-02-19T08:24:08.525246Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 10} session: ydb://session/3?node_id=1&id=OTBiOGQ2OGEtY2VkMTEzOTEtYjBlNTczOGItNDgwZWIyZGE= 2026-02-19T08:24:08.529381Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:967:2824], subTag: 11} session: ydb://session/3?node_id=1&id=NDhhZTMxN2MtYWViZGY2OWQtZDUwOWFkNTMtYWMwOGY4NDk= 2026-02-19T08:24:08.535962Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you ... d_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:18.717824Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1086:2926] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:18.718696Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1090:2927] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:18.719500Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1095:2929] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-02-19T08:24:18.823788Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1052:2903], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.823879Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1053:2904], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.823918Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1054:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.823967Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1055:2906], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.824014Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1056:2907], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.824049Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1057:2908], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.824082Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1058:2909], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.824117Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1065:2916], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.824159Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1066:2917], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.824209Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1068:2919], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:24:18.858400Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1192:3015] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:19.223922Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 9} finished in 0.546255s, errors=0 2026-02-19T08:24:19.224127Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 9 { Tag: 9 DurationMs: 546 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:19.238718Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:2121:3437] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:19.603203Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 10} finished in 0.922212s, errors=0 2026-02-19T08:24:19.603499Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 10 { Tag: 10 DurationMs: 922 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:19.618218Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:3028:3843] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:20.036654Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 5} finished in 1.365605s, errors=0 2026-02-19T08:24:20.036866Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 5 { Tag: 5 DurationMs: 1365 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:20.051473Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:3935:4249] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:20.579486Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 11} finished in 1.898415s, errors=0 2026-02-19T08:24:20.579742Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 11 { Tag: 11 DurationMs: 1898 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:20.595125Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:4842:4655] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:21.166798Z node 2 :DS_LOAD_TEST NOTICE: 
kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 8} finished in 2.490811s, errors=0 2026-02-19T08:24:21.167163Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 8 { Tag: 8 DurationMs: 2490 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:21.183835Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:5749:5061] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:21.833182Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 7} finished in 3.158937s, errors=0 2026-02-19T08:24:21.833467Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 7 { Tag: 7 DurationMs: 3158 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:21.850434Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:6656:5467] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:22.598205Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 3} finished in 3.928881s, errors=0 2026-02-19T08:24:22.598488Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 3 { Tag: 3 DurationMs: 3928 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:22.618198Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7563:5873] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:23.436394Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 2} finished in 4.769887s, errors=0 2026-02-19T08:24:23.436824Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 2 { Tag: 2 DurationMs: 4769 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:23.456711Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:8470:6279] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:24.320509Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 4} finished in 5.651091s, errors=0 2026-02-19T08:24:24.320966Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 4 { Tag: 4 DurationMs: 5651 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:24.342126Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:9377:6685] txid# 281474976716577, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:25.226109Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:967:2824], subTag: 6} finished in 6.553392s, errors=0 2026-02-19T08:24:25.226689Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished: 6 { Tag: 6 DurationMs: 6553 OperationsOK: 100 OperationsError: 0 } 2026-02-19T08:24:25.226753Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:481: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:958:2815], subTag: 3} finished in 6.562949s, oks# 1000, errors# 0 2026-02-19T08:24:25.227034Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:967:2824] with tag# 3 >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAttributesExtSubdomain >> KqpBatchPEA::PrepareState_AbortExecution [GOOD] |93.4%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:23:33.700392Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:33.701644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:33.705439Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:23:33.858029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:33.862589Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:33.872232Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:303:2226], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:33.872622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:33.873983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:33.885809Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:33.886896Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:34.562963Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:34.731483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:34.731609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:34.732090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:34.732160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:34.804489Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:23:34.805024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:34.805427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:34.974556Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:35.012627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:36.212105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:36.212181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:36.212216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:36.212680Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:23:45.460564Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:45.461590Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:45.476653Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:45.479625Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:45.481873Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:303:2227], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:45.482008Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:45.482227Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:45.484323Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:702:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:45.484681Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:45.484851Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:45.938747Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:46.029240Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:46.029358Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:46.029866Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:46.029949Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:46.081071Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:23:46.081966Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:46.082465Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:46.156672Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:46.186574Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:47.557925Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:47.558013Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:47.558072Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:47.558550Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:23:55.363774Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:55.365089Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:55.367558Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:23:55.380035Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:55.382710Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:55.384644Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:308:2231], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:55.385287Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:55.385525Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:55.387322Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:55.387474Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:55.765033Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:55.868123Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:55.868254Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:55.868796Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:55.868864Z node 5 :HIVE W ... esActor] ActorId: [7:269:2227], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:01.406165Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:01.406276Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:01.726489Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:01.825096Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:01.825226Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:01.885479Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:01.969252Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:02.863127Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:02.863192Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:02.863226Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:02.863799Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:24:07.076511Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:07.085176Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:07.087831Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:304:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:07.088138Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:07.088193Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:07.360687Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:07.360824Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:07.393173Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1771489444060906 != 1771489444060909 2026-02-19T08:24:07.396910Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:07.441399Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:07.522171Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:08.109496Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:08.109551Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:08.109592Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:08.110147Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:24:12.747100Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:12.753101Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:12.755900Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:455:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:12.756268Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:12.756342Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:13.104816Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:13.184839Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:13.185010Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:13.244343Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:13.323853Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:14.241249Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:14.241321Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:14.241365Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:14.241903Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:24:22.769376Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:22.770517Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:22.771069Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:24:22.783025Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:22.784711Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:22.786885Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:22.787488Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:22.787683Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:22.789180Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:22.789235Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:23.165133Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:23.290746Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:23.290951Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:23.291503Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:23.291583Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:23.352359Z node 12 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2026-02-19T08:24:23.353072Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:23.353603Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:23.491925Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:23.520923Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:24.526631Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:24.526704Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:24.526757Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:24.527285Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |93.4%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableNotExists [GOOD] Test command err: 2026-02-19T08:24:21.999264Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489300575128955:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:21.999713Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:22.241900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:22.265256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:22.265358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:22.329922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489300575128909:2081] 1771489461975709 != 1771489461975712 2026-02-19T08:24:22.346565Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:22.353863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:22.438127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:22.438151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:22.438158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:22.438251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:22.507717Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:25.393863Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006cc/r3tmp/spilling-tmp-runner/node_1_b780d675-ee5c5826-254de4fb-3046aa75, actor: [1:7608489317754998946:2310] 2026-02-19T08:24:25.394003Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006cc/r3tmp/spilling-tmp-runner |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |93.4%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> 
TImmediateControlsConfiguratorTests::TestDynamicMap |93.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> KqpBatchDelete::TableWithIndex |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |93.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental >> KqpBatchDelete::TableWithSyncIndex-EnableIndexStreamWrite [GOOD] |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |93.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint >> KqpBatchPEA::AbortState_UnknownEvent [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchPEA::PrepareState_AbortExecution [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:17.168724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:17.253158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:17.267575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:17.267645Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:17.268026Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:17.530673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:17.530823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:17.601005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489454600397 != 1771489454600401 2026-02-19T08:24:17.610646Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:17.659028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:17.773242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:18.116032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:18.116106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:18.116161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:18.116613Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:18.128572Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:19.389474Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:19.389584Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:19.389847Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:19.389942Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:19.390176Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715661. Resolved key sets: 1 2026-02-19T08:24:19.390328Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:19.390560Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value0"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '0)) $1) (AsStruct '('"Key" (Uint32 '1)) $1) (AsStruct '('"Key" (Uint32 '2)) $1) (AsStruct '('"Key" (Uint32 '3)) $1) (AsStruct '('"Key" (Uint32 '4)) $1)))) )))) ) 2026-02-19T08:24:19.390614Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:19.390828Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:19.391232Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1230:2780] 2026-02-19T08:24:19.391336Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1230:2780], channels: 0 2026-02-19T08:24:19.391441Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:19.391571Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:19.391617Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1230:2780] 2026-02-19T08:24:19.391666Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. 
Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1230:2780], channels: 0 2026-02-19T08:24:19.391737Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1230:2780], trace_id# 2026-02-19T08:24:19.391804Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1230:2780], trace_id# 2026-02-19T08:24:19.391870Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:19.405771Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1230:2780] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:19.405895Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1230:2780], trace_id# 2026-02-19T08:24:19.405970Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1230:2780], trace_id# 2026-02-19T08:24:19.407209Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1230:2780] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1934 Tasks { TaskId: 1 CpuTimeUs: 1134 FinishTimeMs: 1771489459406 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 43 BuildCpuTimeUs: 1091 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489459392 UpdateTimeMs: 1771489459406 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:19.407316Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE5NzM3MWEtNDNiNThiYjUtYWVlMDg1NjEtZjg5MDNlNWQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1230:2780] 2026-02-19T08:24:19.407413Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1227:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtfzz1t6msvzc5pznm43gbw, Database: /Root, SessionId: yd ... ECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value3"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '3221225469)) $1) (AsStruct '('"Key" (Uint32 '3221225470)) $1) (AsStruct '('"Key" (Uint32 '3221225471)) $1) (AsStruct '('"Key" (Uint32 '3221225472)) $1) (AsStruct '('"Key" (Uint32 '3221225473)) $1)))) )))) ) 2026-02-19T08:24:26.734040Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:26.734184Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:26.734450Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1293:2780] 2026-02-19T08:24:26.734505Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1293:2780], channels: 0 2026-02-19T08:24:26.734576Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:26.734619Z node 2 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. 
Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:26.734662Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1293:2780] 2026-02-19T08:24:26.734703Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1293:2780], channels: 0 2026-02-19T08:24:26.734758Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:1293:2780], trace_id# 2026-02-19T08:24:26.734811Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1293:2780], trace_id# 2026-02-19T08:24:26.734862Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:26.735982Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1293:2780] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:26.736041Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:1293:2780], trace_id# 2026-02-19T08:24:26.736084Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1293:2780], trace_id# 2026-02-19T08:24:26.736885Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1290:2780] TxId: 281474976715664. 
Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1293:2780] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 933 Tasks { TaskId: 1 CpuTimeUs: 452 FinishTimeMs: 1771489466736 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 22 BuildCpuTimeUs: 430 HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489466735 UpdateTimeMs: 1771489466736 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:26.736941Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1293:2780] 2026-02-19T08:24:26.737000Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:1289:2780] trace_id# 2026-02-19T08:24:26.737053Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000933s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:24:26.743462Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:26.743542Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1290:2780] TxId: 281474976715664. Ctx: { TraceId: 01khtg0690dvj3jmntf88qkztj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxMmVhZS02NmYyM2Q3Ny1iY2E0MjEyYi1mNTczNDg0OQ==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:26.893871Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:108} ActorId: [2:1315:3056], ActorState: PrepareState, Operation: BATCH, ActivePartitions: 0, Message: Start resolving table partitions. 2026-02-19T08:24:26.894014Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:484} ActorId: [2:1315:3056], ActorState: PrepareState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: Filling table meta info. TableId# [72057594046644480:42:1] KeyColumnsCount# 1 2026-02-19T08:24:26.894099Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:497} ActorId: [2:1315:3056], ActorState: PrepareState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: Resolving table partitioning. 
TableId# [72057594046644480:42:1] KeyColumnsCount# 1 2026-02-19T08:24:26.894951Z node 2 :KQP_EXECUTER ERROR: {KQPPEA@kqp_partitioned_executer.cpp:174} ActorId: [2:1315:3056], ActorState: PrepareState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: Got abort execution. Sender# [2:1316:3065] FromSessionActor# false StatusCode# ABORTED Issues# {
: Error: Test abort execution } 2026-02-19T08:24:26.895085Z node 2 :KQP_EXECUTER ERROR: {KQPPEA@kqp_partitioned_executer.cpp:853} ActorId: [2:1315:3056], ActorState: PrepareState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: First error occurred. Status# ABORTED Issues# {
: Error: Test abort execution } 2026-02-19T08:24:26.895175Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:630} ActorId: [2:1315:3056], ActorState: PrepareState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: Entering AbortState, trying to finish execution. ActivePartitionsCount# 0 ReturnStatus# ABORTED 2026-02-19T08:24:26.895285Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [2:1315:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# ABORTED Issues# [ {
: Error: Test abort execution } {
: Error: while executing BATCH UPDATE query } ] 2026-02-19T08:24:26.895727Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg06c8dyp8npy48wrh6prd, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NDQ3MTA0MzYtY2E1NGRmNTMtZDYxNmMzZDYtMzBmZGQ3ZTM=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:26.896084Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:1318:3056] TxId: 281474976715665. Ctx: { TraceId: 01khtg06c8dyp8npy48wrh6prd, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NDQ3MTA0MzYtY2E1NGRmNTMtZDYxNmMzZDYtMzBmZGQ3ZTM=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:1317:3056] trace_id# 2026-02-19T08:24:26.896332Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1318:3056] TxId: 281474976715665. Ctx: { TraceId: 01khtg06c8dyp8npy48wrh6prd, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NDQ3MTA0MzYtY2E1NGRmNTMtZDYxNmMzZDYtMzBmZGQ3ZTM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:26.896407Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1318:3056] TxId: 281474976715665. Ctx: { TraceId: 01khtg06c8dyp8npy48wrh6prd, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NDQ3MTA0MzYtY2E1NGRmNTMtZDYxNmMzZDYtMzBmZGQ3ZTM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> KqpBatchPEA::ExecuteState_ChildExecuterAbort [GOOD] >> KqpBatchPEA::ExecuteState_AbortDuringRetry >> KqpBatchUpdate::Large_1 |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |93.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> TJaegerTracingConfiguratorTests::DefaultConfig |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithSyncIndex-EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:24:17.420941Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489283452730263:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:17.421684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:17.642267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:17.661413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:17.661503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:17.716524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:17.734018Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table 
profiles were not loaded 2026-02-19T08:24:17.753685Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489283452730231:2081] 1771489457419371 != 1771489457419374 2026-02-19T08:24:17.814923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:17.814948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:17.814961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:17.815038Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:17.822874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:20.303074Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006d0/r3tmp/spilling-tmp-runner/node_1_d8ff0678-35bd2b53-35314c97-9e4be51f, actor: [1:7608489296337632966:2309] 2026-02-19T08:24:20.303284Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006d0/r3tmp/spilling-tmp-runner 2026-02-19T08:24:20.996729Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:20.996798Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:20.997008Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:20.997048Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:20.997274Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 2 2026-02-19T08:24:20.997415Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:20.997533Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindSyncIndexTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL, Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:20.997754Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"v1" (Just (String '"123")))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct '('"k" (Int32 '1)) $1 $2 $3)) (let $5 (AsStruct '('"k" (Int32 '"2")) $1 $2 $3)) (let $6 (AsStruct '('"k" (Int32 '"3")) $1 $2 $3)) (let $7 (AsStruct '('"k" (Int32 '"4")) $1 $2 $3)) (let $8 (AsStruct '('"k" (Int32 '"5")) $1 $2 $3)) (return (Iterator (AsList $4 $5 $6 $7 $8))) )))) ) 2026-02-19T08:24:20.997793Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:20.998085Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:20.998495Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489296337633294:2325] 2026-02-19T08:24:20.998530Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489296337633294:2325], channels: 0 2026-02-19T08:24:20.998584Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. 
Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:20.998607Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:20.998624Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489296337633294:2325] 2026-02-19T08:24:20.998644Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489296337633294:2325], channels: 0 2026-02-19T08:24:20.998712Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489296337633294:2325], trace_id# 2026-02-19T08:24:20.998752Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489296337633294:2325], trace_id# 2026-02-19T08:24:20.998789Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:21.011498Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489296337633294:2325] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:21.011600Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [1:7608489296337633294:2325], trace_id# 2026-02-19T08:24:21.011648Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489296337633294:2325], trace_id# 2026-02-19T08:24:21.013195Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489296337633291:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg00k559p9yw2asb7kk8w8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwNzg4YWMtMjQwMDk0YWYtNDY3MTMwMzYtOTExMGNlODA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeAct ... 2026-02-19T08:24:27.856203Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489327710776220:2401], channels: 2 2026-02-19T08:24:27.856232Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489327710776219:2400], channels: 1 2026-02-19T08:24:27.856295Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:27.856311Z node 2 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:27.856325Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [2:7608489327710776220:2401] 2026-02-19T08:24:27.856346Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [2:7608489327710776219:2400] 2026-02-19T08:24:27.856376Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715666. 
Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489327710776220:2401], channels: 2 2026-02-19T08:24:27.856395Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489327710776219:2400], channels: 1 2026-02-19T08:24:27.856435Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489327710776220:2401], CA [2:7608489327710776219:2400], trace_id# 2026-02-19T08:24:27.856473Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489327710776220:2401], CA [2:7608489327710776219:2400], trace_id# 2026-02-19T08:24:27.856496Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:27.857381Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489327710776219:2400] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:27.857420Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489327710776220:2401], CA [2:7608489327710776219:2400], trace_id# 2026-02-19T08:24:27.857449Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489327710776220:2401], CA [2:7608489327710776219:2400], trace_id# 2026-02-19T08:24:27.857481Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489327710776220:2401] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:27.857529Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489327710776220:2401], CA [2:7608489327710776219:2400], trace_id# 2026-02-19T08:24:27.857553Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489327710776220:2401], CA [2:7608489327710776219:2400], trace_id# 2026-02-19T08:24:27.858589Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489327710776219:2400] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1209 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 196 FinishTimeMs: 1771489467858 OutputBytes: 1 Tables { TablePath: "/Root/global_sync_idx/idx/indexImplTable" AffectedPartitions: 1 } ComputeCpuTimeUs: 71 BuildCpuTimeUs: 125 WaitOutputTimeUs: 60 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489467857 CreateTimeMs: 1771489467856 UpdateTimeMs: 1771489467858 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:27.858650Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489327710776219:2400] 2026-02-19T08:24:27.858691Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489327710776220:2401], trace_id# 2026-02-19T08:24:27.858716Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. 
Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7608489327710776220:2401], trace_id# 2026-02-19T08:24:27.858831Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489327710776220:2401] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 573 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 201 FinishTimeMs: 1771489467858 InputBytes: 1 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 110 BuildCpuTimeUs: 91 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489467858 CreateTimeMs: 1771489467857 UpdateTimeMs: 1771489467858 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:27.858850Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489327710776220:2401] 2026-02-19T08:24:27.858944Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:27.858965Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:27.859006Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489327710776215:2327] TxId: 281474976715666. Ctx: { TraceId: 01khtg072771reyjpxqcnp5113, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YzFmMDQxMTYtNTI0Yjg5ZTgtNjFkZjk3NzEtZGRiMDcxNGM=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.001782s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::DropBackupCollectionSqlNonExistent >> KqpBatchDelete::UnknownColumn [GOOD] >> KqpBatchDelete::TableWithVectorIndex >> BasicUsage::KeyedWriteSession_NoAutoPartitioning_HashPartitionChooser [GOOD] >> BasicUsage::KeyedWriteSession_NoAutoPartitioning_BoundPartitionChooser >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table [GOOD] >> TxUsage::WriteToTopic_Demo_12_Query [GOOD] |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |93.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> IncrementalBackup::ResetVsUpsertColumnStateSerialization [GOOD] >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: 2026-02-19T08:24:22.235630Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489308577475491:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:22.235980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:22.517447Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:22.555157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:22.555262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:22.614026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489308577475453:2081] 1771489462232051 != 1771489462232054 2026-02-19T08:24:22.625431Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:22.631969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:22.754120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:22.754140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:22.754146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:22.754260Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:22.770293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:25.393831Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006ca/r3tmp/spilling-tmp-runner/node_1_f53b8d3e-a059493-d8f762ce-2c0007bb, actor: [1:7608489321462379182:2387] 2026-02-19T08:24:25.394861Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006ca/r3tmp/spilling-tmp-runner 2026-02-19T08:24:27.901530Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901571Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901580Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901590Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901599Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901610Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901621Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901635Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901643Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901653Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901664Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.901739Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-02-19T08:24:27.901777Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:24:27.901805Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 '('"Key1" (Just (Uint32 '101)))) (let $2 '('"Key2" (Just (String '"One")))) (let $3 '('"Name" (Just (String '"Name1")))) (let $4 (AsStruct $1 $2 $3 '('"Value2" (Just (String '"Value21"))))) (let $5 '('"Key2" (Just (String '"Two")))) (let $6 (AsStruct $1 $5 $3 '('"Value2" (Just (String '"Value22"))))) (let $7 '('"Name" (Just (String '"Name3")))) (let $8 (AsStruct $1 '('"Key2" (Just (String '"Three"))) $7 '('"Value2" (Just (String '"Value23"))))) (let $9 '('"Name" (Just (String '"Name2")))) (let $10 (AsStruct '('"Key1" (Just (Uint32 '102))) $2 $9 '('"Value2" (Just (String '"Value24"))))) (let $11 (AsStruct '('"Key1" (Just (Uint32 '103))) $2 $3 '('"Value2" (Just (String '"Value25"))))) (let $12 (AsStruct '('"Key1" (Just (Uint32 '104))) $2 $7 '('"Value2" (Just (String '"Value26"))))) (let $13 '('"Key1" (Just (Uint32 '105)))) (let $14 (AsStruct $13 $2 $9 '('"Value2" (Just (String '"Value27"))))) (let $15 (AsStruct $13 $5 '('"Name" (Just (String '"Name4"))) '('"Value2" (Just (String '"Value28"))))) (let $16 (AsStruct '('"Key1" (Just (Uint32 '106))) $2 $7 '('"Value2" (Just (String '"Value29"))))) (let $17 (AsStruct '('"Key1" (Just (Uint32 '108))) $2 '('"Name" (Nothing (OptionalType (DataType 'String)))) '('"Value2" (Just (String '"Value31"))))) (return (ToStream (Just (AsList $4 $6 $8 $10 $11 $12 $14 $15 $16 $17)))) )))) ) trace_id# 2026-02-19T08:24:27.901829Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,1] ast# ( (return (lambda '() (block '( (let $1 '('"Fk21" (Just (Uint32 '101)))) (let $2 '('"Fk22" (Just (String '"One")))) (let $3 '('"Value" (Just (String '"Value1")))) (let $4 (AsStruct $1 $2 '('"Key" (Just (Int32 '1))) $3)) (let $5 '('"Fk22" (Just (String '"Two")))) (let $6 (AsStruct '('"Fk21" (Just (Uint32 '102))) $5 '('"Key" (Just (Int32 '2))) $3)) (let $7 '('"Value" (Just (String '"Value2")))) (let $8 (AsStruct '('"Fk21" (Just (Uint32 '103))) $2 '('"Key" (Just (Int32 '3))) $7)) (let $9 (AsStruct '('"Fk21" (Just (Uint32 '104))) $5 '('"Key" (Just (Int32 '4))) $7)) (let $10 '('"Value" (Just (String '"Value3")))) (let $11 (AsStruct '('"Fk21" (Just (Uint32 '105))) $2 '('"Key" (Just (Int32 '5))) $10)) (let $12 (AsStruct '('"Fk21" (Just (Uint32 '106))) $5 '('"Key" (Just (Int32 '6))) $10)) (let $13 (AsStruct '('"Fk21" (Just (Uint32 '107))) $2 '('"Key" (Just (Int32 '7))) '('"Value" (Just (String '"Value4"))))) (let $14 (AsStruct '('"Fk21" (Just (Uint32 '108))) $2 '('"Key" (Just (Int32 '8))) '('"Value" (Just (String '"Value5"))))) (let $15 (AsStruct $1 $5 '('"Key" (Just (Int32 '9))) $3)) (return (ToStream (Just (AsList $4 $6 $8 $9 $11 $12 $13 $14 $15)))) )))) ) trace_id# 2026-02-19T08:24:27.901879Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Stage AST stage_id# [0,2] ast# ( (return (lambda '() (block '( (let $1 '('"Ts" (Just (Int64 '0)))) (let $2 (AsStruct '('"App" (Just (Utf8 '"apache"))) '('"Host" (Just (Utf8 '"front-42"))) '('"Message" (Just (Utf8 '" GET /index.html HTTP/1.1"))) $1)) (let $3 '('"App" (Just (Utf8 '"nginx")))) (let $4 '('"Ts" (Just (Int64 '1)))) (let $5 (AsStruct $3 '('"Host" (Just (Utf8 '"nginx-10"))) '('"Message" (Just (Utf8 '"GET /index.html HTTP/1.1"))) $4)) (let $6 '('"Host" (Just (Utf8 '"nginx-23")))) (let $7 '('"Ts" (Just (Int64 '2)))) (let $8 (AsStruct $3 $6 '('"Message" (Just (Utf8 '"PUT /form HTTP/1.1"))) $7)) (let $9 '('"Ts" (Just (Int64 '3)))) (let $10 (AsStruct $3 $6 '('"Message" (Just (Utf8 '"GET /cat.jpg HTTP/1.1"))) $9)) (let $11 '('"App" (Just (Utf8 '"kikimr-db")))) (let $12 (AsStruct $11 '('"Host" (Just (Utf8 '"kikimr-db-10"))) '('"Message" (Just (Utf8 '"Write Data"))) $4)) (let $13 '('"Host" (Just (Utf8 '"kikimr-db-21")))) (let $14 (AsStruct $11 $13 '('"Message" (Just (Utf8 '"Read Data"))) $7)) (let $15 (AsStruct $11 $13 '('"Message" (Just (Utf8 '"Stream Read Data"))) $9)) (let $16 (AsStruct $11 '('"Host" (Just (Utf8 '"kikimr-db-53"))) '('"Message" (Just (Utf8 '"Discover"))) '('"Ts" (Just (Int64 '4))))) (let $17 (AsStruct '('"App" (Just (Utf8 '"ydb"))) '('"Host" (Just (Utf8 '"ydb-1000"))) '('"Message" (Just (Utf8 '"some very very very very long string"))) $1)) (return (ToStream (Just (AsList $2 $5 $8 $10 $12 $14 $15 $16 $17)))) )))) ) trace_id# 2026-02-19T08:24:27.901913Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,3] ast# ( (return (lambda '() (block '( (let $1 '('"Comment" (Just (String '"None")))) (let $2 '('"Group" (Just (Uint32 '1)))) (let $3 (AsStruct '('"Amount" (Just (Uint64 '"3500"))) $1 $2 '('"Name" (Just (String '"Anna"))))) (let $4 (AsStruct '('"Amount" (Just (Uint64 '"300"))) $1 $2 '('"Name" (Just (String '"Paul"))))) (let $5 (AsStruct '('"Amount" (Just (Uint64 '"7200"))) $1 '('"Group" (Just (Uint32 '2))) '('"Name" (Just (String '"Tony"))))) (return (ToStream (Just (AsList $3 $4 $5)))) )))) ) trace_id# 2026-02-19T08:24:27.901931Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,4] ast# ( (return (lambda '() (ToStream (Just (AsList (AsStruct '('"Key" (Just (String '1))) '('"Value" (Just (String '"One")))) (AsStruct '('"Key" (Just (String '2))) '('"Value" (Just (String '"Two"))))))))) ) trace_id# 2026-02-19T08:24:27.901957Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,5] ast# ( (return (lambda '() (ToStream (Just (AsList (AsStruct '('"Key" (Just (Uint64 '1))) '('"Value" (Just (String '"One")))) (AsStruct '('"Key" (Just (Uint64 '2))) '('"Value" (Just (String '"Two"))))))))) ) trace_id# 2026-02-19T08:24:27.901977Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Stage AST stage_id# [0,6] ast# ( (return (lambda '() (block '( (let $1 '('"Col1" (Just (Uint32 '0)))) (let $2 '('"Col2" (Just (Uint64 '1)))) (let $3 (Just (Int64 '0))) (let $4 '('"Col3" $3)) (let $5 (Just (Int64 '3))) (let $6 (AsStruct $1 $2 $4 '('"Col4" $5))) (let $7 '('"Col1" (Just (Uint32 '1)))) (let $8 (Just (Int64 '1))) (let $9 '('"Col4" $8)) (let $10 (AsStruct $7 $2 $4 $9)) (let $11 '('"Col3" $8)) (let $12 '('"Col4" $3)) (let $13 (AsStruct $7 $2 $11 $12)) (let $14 (Just (Int64 '2))) (let $15 '('"Col3" $14)) (let $16 (AsStruct $7 $2 $15 $9)) (let $17 '('"Col1" (Just (U ... shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:28.032639Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037921 state# Executing trace_id# 2026-02-19T08:24:28.032671Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037922 state# Executing trace_id# 2026-02-19T08:24:28.032684Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:24:28.032717Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 5 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037925 (Executing), DS 72075186224037921 (Executing), DS 72075186224037922 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:24:28.032742Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037921 status# COMPLETE error# trace_id# 2026-02-19T08:24:28.032786Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. 
Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:24:28.032802Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:28.032815Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037922 state# Executing trace_id# 2026-02-19T08:24:28.032837Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:24:28.032860Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 4 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037925 (Executing), DS 72075186224037922 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:24:28.032885Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037922 status# COMPLETE error# trace_id# 2026-02-19T08:24:28.032915Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:24:28.032928Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:28.032941Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:24:28.032975Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 3 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037925 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:24:28.033009Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037923 status# COMPLETE error# trace_id# 2026-02-19T08:24:28.033035Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:24:28.033066Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:28.033085Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 2 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037925 (Executing), trace_id# 2026-02-19T08:24:28.033115Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037924 status# COMPLETE error# trace_id# 2026-02-19T08:24:28.033135Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. 
Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:28.033154Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037925 (Executing), trace_id# 2026-02-19T08:24:28.033174Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037925 status# COMPLETE error# trace_id# 2026-02-19T08:24:28.033316Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:28.033351Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:28.033389Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489330052314859:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg05rw5jjpxanjej06vjq9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ5ZDUyNS1lNzNkNzkxMy04NDQ4Nzk3Yy0yOWQxM2QwYw==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# >> KqpBatchUpdate::Large_3 >> KqpBatchDelete::Large_2 [GOOD] >> KqpBatchDelete::Large_3 >> KqpBatchUpdate::TableWithUniqueSyncIndex+EnableIndexStreamWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchPEA::AbortState_UnknownEvent [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:24:18.974506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:19.065853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:19.085973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:19.086039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:19.086431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:19.329954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:19.330103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:19.410299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489456654829 != 1771489456654833 2026-02-19T08:24:19.419752Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:19.467225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:19.600827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:19.940865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:19.940939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:19.940997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:19.941517Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:19.954588Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:21.221521Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.221600Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:21.221792Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:21.221855Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:21.222016Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715661. Resolved key sets: 1 2026-02-19T08:24:21.222144Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.222276Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value0"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '0)) $1) (AsStruct '('"Key" (Uint32 '1)) $1) (AsStruct '('"Key" (Uint32 '2)) $1) (AsStruct '('"Key" (Uint32 '3)) $1) (AsStruct '('"Key" (Uint32 '4)) $1)))) )))) ) 2026-02-19T08:24:21.222325Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:21.222501Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:21.222996Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1229:2780] 2026-02-19T08:24:21.223036Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1229:2780], channels: 0 2026-02-19T08:24:21.223137Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:21.223175Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:21.223218Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1229:2780] 2026-02-19T08:24:21.223261Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. 
Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1229:2780], channels: 0 2026-02-19T08:24:21.223315Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1229:2780], trace_id# 2026-02-19T08:24:21.223358Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1229:2780], trace_id# 2026-02-19T08:24:21.223402Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:21.233768Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1229:2780] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:21.233877Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1229:2780], trace_id# 2026-02-19T08:24:21.233942Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1229:2780], trace_id# 2026-02-19T08:24:21.234842Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1229:2780] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1441 Tasks { TaskId: 1 CpuTimeUs: 861 FinishTimeMs: 1771489461234 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 30 BuildCpuTimeUs: 831 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489461223 UpdateTimeMs: 1771489461234 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:21.234916Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmMxNTExMzUtNTI2MjgwZDYtMmExYTlmZDItNmY1ZTEzZjE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1229:2780] 2026-02-19T08:24:21.234978Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg00vc90y3hjeh0qg33et3, Database: /Root, SessionId: ydb: ... EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1323:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:28.344796Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1323:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:28.344901Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1319:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:28.344946Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1319:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:28.345042Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1319:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1316:3056] trace_id# 2026-02-19T08:24:28.345112Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1319:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:28.345162Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1319:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:28.345252Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1325:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:28.345297Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1325:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:28.345372Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1325:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1316:3056] trace_id# 2026-02-19T08:24:28.345420Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1325:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:28.345450Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1325:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:28.345521Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1321:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:28.345577Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1321:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:28.345666Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1321:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1316:3056] trace_id# 2026-02-19T08:24:28.345733Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1321:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:28.345779Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1321:3056] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:28.345973Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1316:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1323:3056] PartitionIndex# 2 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:24:28.346077Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1316:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 3 TotalPartitions# 4 2026-02-19T08:24:28.346286Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1316:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Got tx response. Sender# [2:1319:3056] PartitionIndex# 0 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:24:28.346330Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1316:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 2 TotalPartitions# 4 2026-02-19T08:24:28.346462Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1316:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Got tx response. Sender# [2:1325:3056] PartitionIndex# 3 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:24:28.346524Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1316:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 1 TotalPartitions# 4 2026-02-19T08:24:28.346634Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1316:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Got tx response. Sender# [2:1321:3056] PartitionIndex# 1 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:24:28.346757Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [2:1316:3056], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# ABORTED Issues# [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] 2026-02-19T08:24:28.347402Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:28.347816Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:1331:3056] TxId: 281474976715669. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:1330:3056] trace_id# 2026-02-19T08:24:28.348043Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1331:3056] TxId: 281474976715669. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:28.348110Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1331:3056] TxId: 281474976715669. Ctx: { TraceId: 01khtg07rqbcqg4yzz4xf62jdk, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTJkMGMyYmYtYjE3OTMwMmYtZGFmZTcwMTctZjdiNGExYjY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> TxUsage::WriteToTopic_Demo_13_Table >> KqpBatchDelete::TableWithUniqueSyncIndex+EnableIndexStreamWrite [GOOD] >> KqpBatchDelete::TableWithUniqueSyncIndex-EnableIndexStreamWrite >> KqpBatchPEA::ExecuteState_MinKeyErrorIssues >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithUniqueSyncIndex+EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:24:23.660823Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489310579653888:2149];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:23.661495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:23.712103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:23.989817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:23.989932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:23.995634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:24.042014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:24.109415Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:24.112804Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489310579653768:2081] 1771489463655737 != 1771489463655740 2026-02-19T08:24:24.128767Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:24.128790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:24.128796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:24.128908Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:24.201264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:26.787256Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006c5/r3tmp/spilling-tmp-runner/node_1_940edad4-98db2488-9c04ec0-5ae4d734, actor: [1:7608489323464556511:2310] 2026-02-19T08:24:26.787436Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006c5/r3tmp/spilling-tmp-runner 2026-02-19T08:24:27.503880Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.503949Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:27.504055Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:27.504073Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:27.504247Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 2 2026-02-19T08:24:27.504362Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.504413Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindSyncIndexTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL, Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.504579Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"v2" (Just (String '"456")))) (let $2 '('"v3" (Just (String '"789")))) (let $3 (AsStruct '('"k" (Int32 '1)) '('"v1" (Just (String '"123"))) $1 $2)) (let $4 (AsStruct '('"k" (Int32 '"2")) '('"v1" (Just (String '"124"))) $1 $2)) (let $5 (AsStruct '('"k" (Int32 '"3")) '('"v1" (Just (String '"125"))) $1 $2)) (let $6 (AsStruct '('"k" (Int32 '"4")) '('"v1" (Just (String '"126"))) $1 $2)) (let $7 (AsStruct '('"k" (Int32 '"5")) '('"v1" (Just (String '"127"))) $1 $2)) (return (Iterator (AsList $3 $4 $5 $6 $7))) )))) ) 2026-02-19T08:24:27.504594Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:27.504790Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1771489467328} 2026-02-19T08:24:27.505143Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489327759524138:2326] 2026-02-19T08:24:27.505170Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489327759524138:2326], channels: 0 2026-02-19T08:24:27.505226Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. 
Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:27.505240Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:27.505258Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489327759524138:2326] 2026-02-19T08:24:27.505273Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489327759524138:2326], channels: 0 2026-02-19T08:24:27.505320Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489327759524138:2326], trace_id# 2026-02-19T08:24:27.505355Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489327759524138:2326], trace_id# 2026-02-19T08:24:27.505378Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:27.515919Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489327759524138:2326] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:27.515982Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [1:7608489327759524138:2326], trace_id# 2026-02-19T08:24:27.516041Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489327759524135:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg06yr7n28dd5xc5t1vszv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489327759524138:2326], trace_id# 2026-02-19T08:24:27.517641Z node 1 :KQP_EXECUTER ... e 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489336349459040:2446], channels: 1 2026-02-19T08:24:29.754600Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489336349459041:2447], channels: 2 2026-02-19T08:24:29.754659Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:29.754674Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:29.754685Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [1:7608489336349459041:2447] 2026-02-19T08:24:29.754703Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [1:7608489336349459040:2446] 2026-02-19T08:24:29.754719Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489336349459040:2446], channels: 1 2026-02-19T08:24:29.754735Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. 
Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489336349459041:2447], channels: 2 2026-02-19T08:24:29.754765Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489336349459040:2446], CA [1:7608489336349459041:2447], trace_id# 2026-02-19T08:24:29.754793Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489336349459040:2446], CA [1:7608489336349459041:2447], trace_id# 2026-02-19T08:24:29.754812Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:29.755319Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489336349459040:2446] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:29.755357Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489336349459040:2446], CA [1:7608489336349459041:2447], trace_id# 2026-02-19T08:24:29.755380Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489336349459040:2446], CA [1:7608489336349459041:2447], trace_id# 2026-02-19T08:24:29.755492Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489336349459041:2447] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:29.755514Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489336349459040:2446], CA [1:7608489336349459041:2447], trace_id# 2026-02-19T08:24:29.755535Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489336349459040:2446], CA [1:7608489336349459041:2447], trace_id# 2026-02-19T08:24:29.757144Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489336349459041:2447] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 857 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 240 FinishTimeMs: 1771489469756 InputBytes: 1 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 82 BuildCpuTimeUs: 158 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489469756 CreateTimeMs: 1771489469754 UpdateTimeMs: 1771489469756 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:29.757177Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489336349459041:2447] 2026-02-19T08:24:29.757202Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489336349459040:2446], trace_id# 2026-02-19T08:24:29.757221Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489336349459040:2446], trace_id# 2026-02-19T08:24:29.757316Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489336349459040:2446] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1090 Tasks { TaskId: 1 CpuTimeUs: 693 FinishTimeMs: 1771489469756 OutputBytes: 1 Tables { TablePath: "/Root/global_unique_sync_idx/idx/indexImplTable" ReadRows: 5 ReadBytes: 40 AffectedPartitions: 1 } IngressRows: 5 ComputeCpuTimeUs: 80 BuildCpuTimeUs: 613 WaitOutputTimeUs: 17 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489469756 CreateTimeMs: 1771489469754 UpdateTimeMs: 1771489469756 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:29.757344Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489336349459040:2446] 2026-02-19T08:24:29.757412Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:29.757427Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:29.757450Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489336349459036:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg08z2f7b38nr8s9tgdtyy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0MzExNDEtMzk3ZjQ0NDMtNGQ5MmJhOS1hMTQ4YWIzYQ==, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.001947s ReadRows: 5 ReadBytes: 40 RequestUnits# 5 ForceFlag# true trace_id# >> KqpBatchDelete::DeleteOn >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> KqpBatchUpdate::UpdateOn [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] >> TxUsage::WriteToTopic_Demo_43_Table >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query >> KqpBatchUpdate::ManyPartitions_3 >> KqpBatchUpdate::UnknownColumn >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] Test command err: 2026-02-19T08:23:51.239242Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:51.239383Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:51.267016Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:51.267125Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:51.282055Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:51.282640Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=684671962583720842, session=0, seqNo=0) 2026-02-19T08:23:51.282818Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:51.305031Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=684671962583720842, session=1) 2026-02-19T08:23:51.305914Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:51.306088Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:23:51.306249Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:51.318696Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=111) 2026-02-19T08:23:51.319044Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:51.342479Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=222) 2026-02-19T08:23:51.343080Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2173], cookie=2037729455323726637, name="Lock1") 2026-02-19T08:23:51.343186Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2173], cookie=2037729455323726637) 
2026-02-19T08:23:51.362401Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:51.362524Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:51.362977Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:51.363553Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:51.414450Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:51.414593Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:51.415014Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:190:2203], cookie=14848341635064241429, name="Lock1") 2026-02-19T08:23:51.415101Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:190:2203], cookie=14848341635064241429) 2026-02-19T08:23:51.941371Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:51.941501Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:51.954495Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:51.954646Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:51.993776Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:51.994269Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=18154140245709838766, session=0, seqNo=0) 2026-02-19T08:23:51.994404Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:52.007171Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=18154140245709838766, session=1) 2026-02-19T08:23:52.007516Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=4845662244981193443, session=0, seqNo=0) 2026-02-19T08:23:52.007693Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:52.022808Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=4845662244981193443, session=2) 2026-02-19T08:23:52.023196Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2026-02-19T08:23:52.035869Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=111) 2026-02-19T08:23:52.036554Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:148:2170], cookie=535521793608608046, name="Sem1", limit=1) 2026-02-19T08:23:52.036778Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-02-19T08:23:52.050481Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] 
TTxSemaphoreCreate::Complete (sender=[2:148:2170], cookie=535521793608608046) 2026-02-19T08:23:52.050888Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=333, session=1, semaphore="Sem1" count=100500) 2026-02-19T08:23:52.065169Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=333) 2026-02-19T08:23:52.065493Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=222, session=1, semaphore="Sem1" count=1) 2026-02-19T08:23:52.065642Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-02-19T08:23:52.065841Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=333, session=2, semaphore="Sem1" count=1) 2026-02-19T08:23:52.085414Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=222) 2026-02-19T08:23:52.085515Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=333) 2026-02-19T08:23:52.086140Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:158:2180], cookie=6031249859352012594, name="Sem1") 2026-02-19T08:23:52.086239Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:158:2180], cookie=6031249859352012594) 2026-02-19T08:23:52.086690Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:161:2183], cookie=1710653611868957412, name="Sem1") 2026-02-19T08:23:52.086761Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:161:2183], cookie=1710653611868957412) 2026-02-19T08:23:52.087173Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[2:164:2186], cookie=11689895345837993614, name="Sem1", force=0) 2026-02-19T08:23:52.100367Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[2:164:2186], cookie=11689895345837993614) 2026-02-19T08:23:52.101005Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[2:169:2191], cookie=6671958743212860712, name="Sem1", force=1) 2026-02-19T08:23:52.101113Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-02-19T08:23:52.113849Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[2:169:2191], cookie=6671958743212860712) 2026-02-19T08:23:52.662587Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:52.662716Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:52.689347Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:52.689482Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:52.730411Z node 3 :KESUS_TABLET 
DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:52.731043Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=9622478644442973550, session=0, seqNo=0) 2026-02-19T08:23:52.731222Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:52.748908Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=9622478644442973550, session=1) 2026-02-19T08:23:52.749222Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=18311091754696067439, session=0, seqNo=0) 2026-02-19T08:23:52.749376Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:52.764313Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=18311091754696067439, session=2) 2026-02-19T08:23:52.764840Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:148:2170], cookie=6380514668788174591, name="Sem1", limit=1) 2026-02-19T08:23:52.764992Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-02-19T08:23:52.777444Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:148:2170], cookie=6380514668788174591) 2026-02-19T08:23:52.777862Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:137:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-02-19T08:23:52.778036Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-02-19T08:23:52.778246Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:137:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-02-19T08:23:52.791027Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (send ... 
117522256025386) 2026-02-19T08:24:16.618611Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-02-19T08:24:16.618810Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-02-19T08:24:16.631624Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2026-02-19T08:24:16.647711Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:24:16.647845Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:24:16.648421Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:24:16.649200Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:24:16.702144Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:24:16.702329Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-02-19T08:24:16.702845Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:189:2202], cookie=17156153067831543150, name="Sem1") 2026-02-19T08:24:16.702958Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:189:2202], cookie=17156153067831543150) 2026-02-19T08:24:17.139050Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:17.151428Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:17.515376Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:17.534685Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:17.887397Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:17.899103Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:18.260227Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:18.272199Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:18.639672Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:18.652116Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:18.992172Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:19.004404Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:19.338596Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:19.355718Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:19.732475Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:19.746497Z node 5 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:20.108382Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:20.120552Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:20.515691Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:20.530077Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:20.899366Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:20.914820Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:21.279160Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:21.291872Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:21.677857Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:21.690345Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:22.061209Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:22.074460Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:22.486770Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:22.502964Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:22.895077Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:22.910579Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:23.286411Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:23.298772Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:23.678042Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:23.696271Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:24.084272Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:24.096764Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:24.499175Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:24.511950Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:24.882568Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:24.894729Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:25.291202Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:25.303479Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:25.672819Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2026-02-19T08:24:25.685284Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:26.070729Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:26.084270Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:26.490083Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:26.507024Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:26.897244Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:26.914624Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:27.296623Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:27.309345Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:27.680565Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:27.693736Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:28.058015Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:28.069922Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:28.511732Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:28.525980Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:28.900454Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:28.912985Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:29.279727Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:29.291843Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:29.670675Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:29.683447Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:30.058757Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:30.070650Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:30.467129Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:30.479591Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:30.862143Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:30.882321Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:31.266623Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:31.283011Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:31.653981Z node 5 
:KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:31.666165Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:32.036817Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:32.049087Z node 5 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:32.412441Z node 5 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-02-19T08:24:32.412537Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-02-19T08:24:32.412602Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-02-19T08:24:32.424652Z node 5 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-02-19T08:24:32.435655Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:455:2414], cookie=7480600936247794735, name="Sem1") 2026-02-19T08:24:32.435790Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:455:2414], cookie=7480600936247794735) >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UpdateOn [GOOD] Test command err: 2026-02-19T08:24:26.018533Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489325286900557:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:26.018612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:26.400042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:26.400165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:26.426536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:26.440335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:26.444033Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489325286900526:2081] 1771489466016743 != 1771489466016746 2026-02-19T08:24:26.461585Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:26.570382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:26.570407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:26.570413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:26.570500Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:26.685571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:29.354629Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006c0/r3tmp/spilling-tmp-runner/node_1_6911926-3cd94cec-4c3ebf36-9fab94e9, actor: [1:7608489338171804248:2385] 2026-02-19T08:24:29.354817Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006c0/r3tmp/spilling-tmp-runner 2026-02-19T08:24:31.516956Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517016Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517050Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517105Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517144Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517209Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517253Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517289Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517335Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517377Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517421Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.517484Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:31.517757Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:31.517795Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:31.518173Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715672. 
Resolved key sets: 11 2026-02-19T08:24:31.518342Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518392Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518462Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518516Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518577Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518631Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518740Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518792Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518845Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.518890Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From ... m, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489346761739931:2529], CA [1:7608489346761739932:2530], CA [1:7608489346761739933:2531], CA [1:7608489346761739934:2532], CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.537103Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. 
Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [1:7608489346761739931:2529], CA [1:7608489346761739932:2530], CA [1:7608489346761739933:2531], CA [1:7608489346761739934:2532], CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.537162Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489346761739931:2529] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 791 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 476 FinishTimeMs: 1771489471534 EgressBytes: 495 EgressRows: 9 ComputeCpuTimeUs: 15 BuildCpuTimeUs: 461 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471525 UpdateTimeMs: 1771489471534 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:31.537172Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489346761739931:2529] 2026-02-19T08:24:31.537188Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489346761739932:2530], CA [1:7608489346761739933:2531], CA [1:7608489346761739934:2532], CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.537203Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489346761739932:2530], CA [1:7608489346761739933:2531], CA [1:7608489346761739934:2532], CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.537407Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489346761739932:2530] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1269 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 402 FinishTimeMs: 1771489471534 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 390 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471525 UpdateTimeMs: 1771489471534 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:31.537425Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. 
Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489346761739932:2530] 2026-02-19T08:24:31.537460Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489346761739933:2531], CA [1:7608489346761739934:2532], CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.537485Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489346761739933:2531], CA [1:7608489346761739934:2532], CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.537659Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489346761739933:2531] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1350 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 927 FinishTimeMs: 1771489471534 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 16 BuildCpuTimeUs: 911 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471527 UpdateTimeMs: 1771489471534 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:31.537681Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489346761739933:2531] 2026-02-19T08:24:31.537704Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489346761739934:2532], CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.537894Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489346761739934:2532], CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.538246Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. 
Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489346761739934:2532] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1291 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 948 FinishTimeMs: 1771489471535 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 26 BuildCpuTimeUs: 922 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471528 UpdateTimeMs: 1771489471535 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:31.538278Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489346761739934:2532] 2026-02-19T08:24:31.538301Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.538319Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489346761739935:2533], trace_id# 2026-02-19T08:24:31.538673Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489346761739935:2533] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1991 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1488 FinishTimeMs: 1771489471535 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 32 BuildCpuTimeUs: 1456 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471529 UpdateTimeMs: 1771489471535 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:31.538693Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489346761739935:2533] 2026-02-19T08:24:31.538727Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:7608489346761739921:2402] trace_id# 2026-02-19T08:24:31.538756Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. 
Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.011201s ReadRows: 0 ReadBytes: 0 RequestUnits# 7 ForceFlag# true trace_id# 2026-02-19T08:24:31.581091Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:31.581124Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489346761739922:2402] TxId: 281474976715672. Ctx: { TraceId: 01khtg09kpd4fcccah69e4gxw9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzFhZTVlYTktNmZiN2I5MTQtN2Q5M2NiNy1mYjc3NjJm, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> KqpBatchDelete::TableWithVectorIndex [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: 2026-02-19T08:24:26.172483Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489323355809655:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:26.172597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:26.188560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:26.485338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:26.485457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:26.498208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:26.536338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:26.598437Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:26.599821Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489323355809545:2081] 1771489466159325 != 1771489466159328 2026-02-19T08:24:26.666623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:26.666655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:26.666662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-02-19T08:24:26.666800Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:29.550858Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006c2/r3tmp/spilling-tmp-runner/node_1_e33654b2-e046e5fb-e5df118e-8106c70b, actor: [1:7608489336240713266:2385] 2026-02-19T08:24:29.551096Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006c2/r3tmp/spilling-tmp-runner 2026-02-19T08:24:31.758693Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.758753Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.758781Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.758807Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.758832Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.758868Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.758906Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.758942Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.758972Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.759007Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.759050Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.759112Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:31.760104Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:31.760144Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:31.760457Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:31.760607Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.760661Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.760702Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.760765Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.760819Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.760864Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.760903Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.760951Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.760996Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:31.761038Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint3 ... 1:7608489344830648953:2530], CA [1:7608489344830648954:2531], CA [1:7608489344830648955:2532], CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.797768Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [1:7608489344830648952:2529], CA [1:7608489344830648953:2530], CA [1:7608489344830648954:2531], CA [1:7608489344830648955:2532], CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.798007Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489344830648952:2529] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 667 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 407 FinishTimeMs: 1771489471794 EgressBytes: 495 EgressRows: 9 ComputeCpuTimeUs: 15 BuildCpuTimeUs: 392 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471767 UpdateTimeMs: 1771489471794 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:31.798031Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489344830648952:2529] 2026-02-19T08:24:31.798059Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489344830648953:2530], CA [1:7608489344830648954:2531], CA [1:7608489344830648955:2532], CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.798081Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489344830648953:2530], CA [1:7608489344830648954:2531], CA [1:7608489344830648955:2532], CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.798338Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489344830648953:2530] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1219 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 359 FinishTimeMs: 1771489471795 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 17 BuildCpuTimeUs: 342 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471768 UpdateTimeMs: 1771489471795 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:31.798361Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. 
Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489344830648953:2530] 2026-02-19T08:24:31.798390Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489344830648954:2531], CA [1:7608489344830648955:2532], CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.798410Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489344830648954:2531], CA [1:7608489344830648955:2532], CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.798647Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489344830648954:2531] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1170 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 812 FinishTimeMs: 1771489471795 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 15 BuildCpuTimeUs: 797 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471769 UpdateTimeMs: 1771489471795 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:31.798672Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489344830648954:2531] 2026-02-19T08:24:31.798696Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489344830648955:2532], CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.798715Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489344830648955:2532], CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.798988Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489344830648955:2532] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1289 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 902 FinishTimeMs: 1771489471795 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 17 BuildCpuTimeUs: 885 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471770 UpdateTimeMs: 1771489471795 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:31.799015Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489344830648955:2532] 2026-02-19T08:24:31.799035Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.799070Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489344830648956:2533], trace_id# 2026-02-19T08:24:31.799553Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489344830648956:2533] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2234 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1636 FinishTimeMs: 1771489471795 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 32 BuildCpuTimeUs: 1604 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489471772 UpdateTimeMs: 1771489471795 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:31.799580Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489344830648956:2533] 2026-02-19T08:24:31.799623Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. 
Send Commit to BufferActor buffer_actor_id# [1:7608489344830648942:2401] trace_id# 2026-02-19T08:24:31.799656Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.011683s ReadRows: 0 ReadBytes: 0 RequestUnits# 7 ForceFlag# true trace_id# 2026-02-19T08:24:31.823592Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:31.823643Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489344830648943:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg09pg2svkv80pedzeaebe, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4ODNlZjQtNzczN2M2ODMtNDQ1MDhiNzktMWE2ZDViNTY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> KqpBatchUpdate::TableWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: 2026-02-19T08:24:26.213281Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489323479311439:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:26.213326Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:26.291703Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:26.591478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:26.596215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:26.596306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:26.676777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:26.689830Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:26.700136Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489323479311409:2081] 1771489466211413 != 1771489466211416 2026-02-19T08:24:26.797413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:26.814387Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:26.814409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:26.814419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:26.814525Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:27.236483Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:29.559087Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006be/r3tmp/spilling-tmp-runner/node_1_367a3e2e-1c38c829-81efce9-bd95133, actor: [1:7608489336364215140:2385] 2026-02-19T08:24:29.559296Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006be/r3tmp/spilling-tmp-runner 2026-02-19T08:24:31.937427Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937459Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937468Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937477Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937487Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937496Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937505Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937518Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937529Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937538Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937547Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:31.937599Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-02-19T08:24:31.937632Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:24:31.937658Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 '('"Key1" (Just (Uint32 '101)))) (let $2 '('"Key2" (Just (String '"One")))) (let $3 '('"Name" (Just (String '"Name1")))) (let $4 (AsStruct $1 $2 $3 '('"Value2" (Just (String '"Value21"))))) (let $5 '('"Key2" (Just (String '"Two")))) (let $6 (AsStruct $1 $5 $3 '('"Value2" (Just (String '"Value22"))))) (let $7 '('"Name" (Just (String '"Name3")))) (let $8 (AsStruct $1 '('"Key2" (Just (String '"Three"))) $7 '('"Value2" (Just (String '"Value23"))))) (let $9 '('"Name" (Just (String '"Name2")))) (let $10 (AsStruct '('"Key1" (Just (Uint32 '102))) $2 $9 '('"Value2" (Just (String '"Value24"))))) (let $11 (AsStruct '('"Key1" (Just (Uint32 '103))) $2 $3 '('"Value2" (Just (String '"Value25"))))) (let $12 (AsStruct '('"Key1" (Just (Uint32 '104))) $2 $7 '('"Value2" (Just (String '"Value26"))))) (let $13 '('"Key1" (Just (Uint32 '105)))) (let $14 (AsStruct $13 $2 $9 '('"Value2" (Just (String '"Value27"))))) (let $15 (AsStruct $13 $5 '('"Name" (Just (String '"Name4"))) '('"Value2" (Just (String '"Value28"))))) (let $16 (AsStruct '('"Key1" (Just (Uint32 '106))) $2 $7 '('"Value2" (Just (String '"Value29"))))) (let $17 (AsStruct '('"Key1" (Just (Uint32 '108))) $2 '('"Name" (Nothing (OptionalType (DataType 'String)))) '('"Value2" (Just (String '"Value31"))))) (return (ToStream (Just (AsList $4 $6 $8 $10 $11 $12 $14 $15 $16 $17)))) )))) ) trace_id# 2026-02-19T08:24:31.937678Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,1] ast# ( (return (lambda '() (block '( (let $1 '('"Comment" (Just (String '"None")))) (let $2 '('"Group" (Just (Uint32 '1)))) (let $3 (AsStruct '('"Amount" (Just (Uint64 '"3500"))) $1 $2 '('"Name" (Just (String '"Anna"))))) (let $4 (AsStruct '('"Amount" (Just (Uint64 '"300"))) $1 $2 '('"Name" (Just (String '"Paul"))))) (let $5 (AsStruct '('"Amount" (Just (Uint64 '"7200"))) $1 '('"Group" (Just (Uint32 '2))) '('"Name" (Just (String '"Tony"))))) (return (ToStream (Just (AsList $3 $4 $5)))) )))) ) trace_id# 2026-02-19T08:24:31.937767Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,2] ast# ( (return (lambda '() (ToStream (Just (AsList (AsStruct '('"Key" (Just (String '1))) '('"Value" (Just (String '"One")))) (AsStruct '('"Key" (Just (String '2))) '('"Value" (Just (String '"Two"))))))))) ) trace_id# 2026-02-19T08:24:31.937794Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Stage AST stage_id# [0,3] ast# ( (return (lambda '() (block '( (let $1 '('"Col1" (Just (Uint32 '0)))) (let $2 '('"Col2" (Just (Uint64 '1)))) (let $3 (Just (Int64 '0))) (let $4 '('"Col3" $3)) (let $5 (Just (Int64 '3))) (let $6 (AsStruct $1 $2 $4 '('"Col4" $5))) (let $7 '('"Col1" (Just (Uint32 '1)))) (let $8 (Just (Int64 '1))) (let $9 '('"Col4" $8)) (let $10 (AsStruct $7 $2 $4 $9)) (let $11 '('"Col3" $8)) (let $12 '('"Col4" $3)) (let $13 (AsStruct $7 $2 $11 $12)) (let $14 (Just (Int64 '2))) (let $15 '('"Col3" $14)) (let $16 (AsStruct $7 $2 $15 $9)) (let $17 '('"Col1" (Just (Uint32 '2)))) (let $18 '('"Col4" $14)) (let $19 (AsStruct $17 $2 $4 $18)) (let $20 '('"Col2" (Just (Uint64 '2)))) (let $21 (AsStruct $7 $20 $4 $9)) (let $22 (AsStruct $7 $20 $11 $12)) (let $23 (AsStruct $17 $20 $4 $9)) (let $24 '('"Col1" (Just (Uint32 '3)))) (let $25 (AsStruct $24 $20 $11 '('"Col4" (Just (Int64 '5))))) (let $26 '('"Col2" (Just (Uint64 '3)))) (let $27 (AsStruct $1 $26 $4 $9)) (let $28 (AsStruct $7 $26 '('"Col3" $5) $12)) (let $29 (AsStruct $17 $26 $4 $9)) (let $30 (AsStruct $1 $26 $15 '('"Col4" (Just (Int64 '4))))) (let $31 (AsStruct $7 $26 $11 $9)) (let $32 (AsStruct $17 $26 $11 $18)) (let $33 (AsStruct $24 $26 $4 $9)) (return (ToStream (Just (AsList $6 $10 $13 $16 $19 $21 $22 $23 $25 $27 $28 $29 $30 $31 $32 $33)))) )))) ) trace_id# 2026-02-19T08:24:31.937809Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,4] ast# ( (return (lambda '() (ToStream (Just (AsList (AsStruct '('"Key" (Just (Uint64 '1))) '('"Value" (Just (String '"One")))) (AsStruct '('"Key" (Just (Uint64 '2))) '('"Value" (Just (String '"Two"))))))))) ) trace_id# 2026-02-19T08:24:31.937834Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Stage AST stage_id# [0,5] ast# ( (return (lambda '() (block '( (let $1 '('"Data" (Just (Int32 '1)))) (let $2 '('"Text" (Just (String '"Value1")))) (let $3 (AsStruct $1 '('"Key" (Just (Uint64 '101))) $2)) (let $4 '('"Data" (Just (Int32 '2)))) (let $5 (AsStruct $4 '('"Key" (Just (Uint64 '201))) $2)) (let $6 '('"Data" (Just (Int32 '3)))) (let $7 (AsStruct $6 '('"Key" (Just (Uint64 '301))) $2)) (let $8 (AsStruct $1 '('"Key" (Just (Uint64 '401))) $2)) (let $9 (AsStruct $4 '('"Key" (Just (Uint64 '501))) $2)) (let $10 (AsStruct $6 '('"Key" (Just (Uint64 '601))) $2)) (let $11 (AsStruct $1 '('"Key" (Just (Uint64 '701))) $2)) (let $12 (AsStruct $4 '('"Key" (Just (Uint64 '801))) $2)) (let $13 '('"Text" (Just (String '"Value2")))) (let $14 (AsStruct $6 '('"Key" (Just (Uint64 '102))) $13)) (let $15 (AsStruct $1 '('"Key" (Just (Uint64 '202))) $13)) (let $16 (AsStruct $4 '('"Key" (Just (Uint64 '302))) $13)) (let $17 (AsStruct $6 '('"Key" (Just (Uint64 '402))) $13)) (let $18 (AsStruct $1 '('"Key" (Just (Uint64 '502))) $13)) (let $19 (AsStruct $4 '('"Key" (Just (Uint64 '602))) $13)) (let $20 (AsStruct $6 '('"Key" (Just (Uint64 '702))) $13)) (let $21 (AsStruct $1 '('"Key" (Just (Uint64 '802))) $13)) (let $22 '('"Text" (Just (String '"Value3")))) (let $23 (AsStruct $4 '('"Key" (Just (Uint64 '103))) $22)) (let $24 (AsStruct $6 '('"Key" (Just (Uint64 '203))) $22)) (let $25 (AsStruct $1 '('"Key" (Just (Uint64 '303))) $22)) (let $26 (AsStruct $4 '('"Key" (Just (Uint64 '403))) $22)) (let $27 (AsStruct $6 '('"Key" (Just (Uint64 '503))) $22)) (let $28 (AsStruct $1 '('"Key" (Just (Uint64 '603))) $22)) (let $29 (AsStruct $4 '('"Key" (Just (Uint64 '703))) $22)) (let $30 (AsStruct $6 '('"Key" ... shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:32.058774Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037921 state# Executing trace_id# 2026-02-19T08:24:32.058798Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037922 state# Executing trace_id# 2026-02-19T08:24:32.058811Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:24:32.058829Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 0 compute actor(s) and 5 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037925 (Executing), DS 72075186224037921 (Executing), DS 72075186224037922 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:24:32.058870Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037921 status# COMPLETE error# trace_id# 2026-02-19T08:24:32.058895Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:24:32.058913Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:32.058929Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037922 state# Executing trace_id# 2026-02-19T08:24:32.058940Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:24:32.058957Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 4 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037925 (Executing), DS 72075186224037922 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:24:32.058983Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. 
Got propose result shard_id# 72075186224037922 status# COMPLETE error# trace_id# 2026-02-19T08:24:32.059021Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:24:32.059044Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:32.059058Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:24:32.059074Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 3 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037925 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:24:32.059108Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037923 status# COMPLETE error# trace_id# 2026-02-19T08:24:32.059132Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:24:32.059151Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:32.059194Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 2 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037925 (Executing), trace_id# 2026-02-19T08:24:32.059219Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037924 status# COMPLETE error# trace_id# 2026-02-19T08:24:32.059246Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037925 state# Executing trace_id# 2026-02-19T08:24:32.059264Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037925 (Executing), trace_id# 2026-02-19T08:24:32.059293Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037925 status# COMPLETE error# trace_id# 2026-02-19T08:24:32.059419Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:32.059455Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:32.059490Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489344954150813:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg09tpb1fdhxtxnsfk1gzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRjYzJkM2ItOWM5YWJkMTEtNGFlNGZjNWUtMjE1NTYyMDQ=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> IncrementalBackup::DropBackupCollectionSqlNonExistent [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental >> KqpBatchPEA::ExecuteState_AbortDuringRetry [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithVectorIndex [GOOD] Test command err: 2026-02-19T08:24:23.356970Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489309089697884:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:23.357800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:23.561510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:23.596174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:23.597171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:23.662122Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:23.669848Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489309089697854:2081] 1771489463355426 != 1771489463355429 2026-02-19T08:24:23.683352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:23.734900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:23.786211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:23.786238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:23.786244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:23.786315Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:26.709037Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006c7/r3tmp/spilling-tmp-runner/node_1_18c8b726-bbde917e-a65fe2f4-1df0d47, actor: [1:7608489321974601574:2385] 2026-02-19T08:24:26.709264Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006c7/r3tmp/spilling-tmp-runner 2026-02-19T08:24:29.083493Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: 
StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083548Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083576Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083602Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083627Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083662Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083695Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083729Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083763Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083799Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083837Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:29.083899Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:29.086154Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:29.086202Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:29.086585Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:29.086766Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.086840Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.086886Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.086931Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.087007Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.087054Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.087099Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.087165Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.087217Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:29.087264Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Group ... 026-02-19T08:24:29.112186Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489334859504555:2531] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1401 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 1037 FinishTimeMs: 1771489469108 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 17 BuildCpuTimeUs: 1020 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489469095 UpdateTimeMs: 1771489469108 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:29.112201Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. 
Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489334859504555:2531] 2026-02-19T08:24:29.112231Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489334859504554:2530], CA [1:7608489334859504556:2532], CA [1:7608489334859504557:2533], trace_id# 2026-02-19T08:24:29.112258Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489334859504554:2530], CA [1:7608489334859504556:2532], CA [1:7608489334859504557:2533], trace_id# 2026-02-19T08:24:29.112570Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489334859504556:2532] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1470 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 1064 FinishTimeMs: 1771489469108 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 18 BuildCpuTimeUs: 1046 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489469097 UpdateTimeMs: 1771489469108 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:29.112607Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489334859504556:2532] 2026-02-19T08:24:29.112639Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489334859504554:2530], CA [1:7608489334859504557:2533], trace_id# 2026-02-19T08:24:29.112660Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489334859504554:2530], CA [1:7608489334859504557:2533], trace_id# 2026-02-19T08:24:29.112739Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489334859504554:2530] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2022 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 1045 FinishTimeMs: 1771489469108 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 23 BuildCpuTimeUs: 1022 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489469095 UpdateTimeMs: 1771489469108 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:29.112760Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489334859504554:2530] 2026-02-19T08:24:29.112782Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489334859504557:2533], trace_id# 2026-02-19T08:24:29.112808Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489334859504557:2533], trace_id# 2026-02-19T08:24:29.113251Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489334859504557:2533] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2551 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1963 FinishTimeMs: 1771489469108 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 36 BuildCpuTimeUs: 1927 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489469097 UpdateTimeMs: 1771489469108 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:29.113280Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489334859504557:2533] 2026-02-19T08:24:29.113324Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. 
Send Commit to BufferActor buffer_actor_id# [1:7608489334859504543:2401] trace_id# 2026-02-19T08:24:29.113376Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.013978s ReadRows: 0 ReadBytes: 0 RequestUnits# 9 ForceFlag# true trace_id# 2026-02-19T08:24:29.142617Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:29.142664Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489334859504544:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg072q81f0pg9w4esgp4gt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2YWRlYjQtZTQ3YmE1NTQtYjBlMzc3ODAtZjQwMmQ4Nzc=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:30.048192Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489338905629499:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:30.048255Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:30.064325Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:30.161532Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:30.166295Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489338905629468:2081] 1771489470046670 != 1771489470046673 2026-02-19T08:24:30.174234Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:30.175266Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:30.175339Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:30.196566Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:30.266816Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:30.266834Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:30.266840Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:30.266897Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-02-19T08:24:30.378349Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:33.144979Z node 2 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006c7/r3tmp/spilling-tmp-runner/node_2_d9b95eaa-263ec854-2a0cca2-680c1986, actor: [2:7608489351790532201:2310] 2026-02-19T08:24:33.145192Z node 2 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006c7/r3tmp/spilling-tmp-runner >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> KqpBatchUpdate::ManyPartitions_2 |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] >> THealthCheckTest::BridgeTimeDifference [GOOD] >> KqpBatchDelete::TableWithUniqueSyncIndex-EnableIndexStreamWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] Test command err: 2026-02-19T08:22:57.962165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:57.962249Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:58.209256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:59.464909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:22:59.648253Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:59.648767Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:59.649502Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2040749097028815281 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:59.652951Z node 4 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:22:59.813207Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:59.813664Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:59.817814Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17758198266165749683 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:59.874037Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:59.874596Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:59.877919Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8573573288617686956 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:59.919970Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:59.920491Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:59.920667Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpRyueuo/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3447958734103051356 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceIn ... 
S_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.010187s 2026-02-19T08:23:32.225233Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:32.225324Z node 55 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:32.487741Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:35.980581Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:35.980669Z node 64 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:36.143168Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:37.314632Z node 65 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:23:37.315341Z node 65 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpCpumF1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:23:37.315638Z node 65 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpCpumF1/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmpCpumF1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13367632436271084208 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:23:40.230595Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:40.230681Z node 73 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:40.387235Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:44.022931Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:44.023076Z node 82 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:44.141751Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:45.139838Z node 87 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:23:45.140707Z node 87 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmp909l53/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:23:45.141068Z node 87 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmp909l53/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036ae/r3tmp/tmp909l53/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 796368878835384263 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:24:22.326507Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:22.326612Z node 91 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:22.505243Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:23.944936Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:23.945031Z node 92 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:24.064328Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:25.527979Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:25.528075Z node 93 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:25.661306Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:27.168858Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 
2026-02-19T08:24:27.168955Z node 94 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:27.294279Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:28.609186Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:28.609293Z node 95 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:28.725198Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:29.951368Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:29.951454Z node 96 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:30.055239Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:31.440784Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:31.440903Z node 97 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:31.542513Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:33.159360Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:33.159463Z node 98 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:33.258282Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2026-02-19T08:23:53.957239Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:53.957383Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:53.976922Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:53.977038Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:53.992095Z node 1 
:KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:53.992738Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=14806377672962996576, session=0, seqNo=0) 2026-02-19T08:23:53.992918Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:23:54.015625Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=14806377672962996576, session=1) 2026-02-19T08:23:54.015941Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=7218378594518079843, session=0, seqNo=0) 2026-02-19T08:23:54.016073Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:23:54.034849Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=7218378594518079843, session=2) 2026-02-19T08:23:54.035738Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:23:54.035890Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:23:54.035998Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:54.036200Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=222, session=2, semaphore="Lock2" count=1) 2026-02-19T08:23:54.036277Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-02-19T08:23:54.036348Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2026-02-19T08:23:54.036465Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=333, session=1, semaphore="Lock2" count=1) 2026-02-19T08:23:54.036528Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2026-02-19T08:23:54.048541Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=111) 2026-02-19T08:23:54.048613Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=222) 2026-02-19T08:23:54.048649Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=333) 2026-02-19T08:23:54.049241Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2173], cookie=17801517505696481673, name="Lock1") 2026-02-19T08:23:54.049355Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2173], cookie=17801517505696481673) 2026-02-19T08:23:54.049890Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute 
(sender=[1:154:2176], cookie=410407487494337437, name="Lock2") 2026-02-19T08:23:54.049963Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=410407487494337437) 2026-02-19T08:23:54.063959Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:23:54.064075Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:23:54.064565Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:23:54.065120Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:23:54.117258Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:23:54.117448Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:23:54.117510Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2026-02-19T08:23:54.117588Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2026-02-19T08:23:54.118022Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:193:2206], cookie=10590320255488166808, name="Lock1") 2026-02-19T08:23:54.118102Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:193:2206], cookie=10590320255488166808) 2026-02-19T08:23:54.118664Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:201:2213], cookie=14418790659645182034, name="Lock2") 2026-02-19T08:23:54.118740Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:201:2213], cookie=14418790659645182034) 2026-02-19T08:23:54.555733Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:54.568238Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:54.931470Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:54.943624Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:55.364507Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:55.377926Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:55.743141Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:55.755306Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:56.129442Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:56.141310Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:56.495389Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:56.507651Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:56.837704Z 
node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:56.850025Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:57.218111Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:57.230128Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:57.580650Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:57.592988Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:57.996132Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:58.008218Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:58.369919Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:58.384579Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:58.747785Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:58.759716Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:59.109399Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:59.121382Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:59.483291Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:59.497827Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:23:59.916946Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:23:59.929126Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:00.292434Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:00.304909Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:00.667619Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:00.679427Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:01.040199Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:01.052461Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:01.413034Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:01.425220Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:01.817167Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:01.829316Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:02.191411Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:02.203519Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: 
[72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:02.569643Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:02.582582Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:02.955422Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:02.970564Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:03.332179Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:24:03.344183Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:24:03.720064Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [7205759403 ... Complete (sender=[4:367:2347], cookie=12430165564311196207) 2026-02-19T08:24:34.801961Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:370:2350], cookie=16341415668481949222, name="Lock2") 2026-02-19T08:24:34.802015Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:370:2350], cookie=16341415668481949222) 2026-02-19T08:24:34.802326Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:373:2353], cookie=3640417131744660180) 2026-02-19T08:24:34.802368Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:373:2353], cookie=3640417131744660180) 2026-02-19T08:24:34.819921Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:24:34.820045Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:24:34.820544Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:24:34.821213Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:24:34.871184Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:24:34.871357Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-02-19T08:24:34.871416Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-02-19T08:24:34.871832Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:412:2383], cookie=12040719807822749922) 2026-02-19T08:24:34.871916Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:412:2383], cookie=12040719807822749922) 2026-02-19T08:24:34.872408Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:419:2389], cookie=830312057395215775, name="Lock1") 2026-02-19T08:24:34.872473Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:419:2389], cookie=830312057395215775) 2026-02-19T08:24:34.872889Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:422:2392], cookie=2408742847692662194, name="Lock2") 
2026-02-19T08:24:34.872953Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:422:2392], cookie=2408742847692662194) 2026-02-19T08:24:35.316114Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:24:35.316247Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:24:35.335842Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:24:35.335985Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:24:35.361575Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:24:35.362565Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2161], cookie=13227821641665062577, session=0, seqNo=0) 2026-02-19T08:24:35.362775Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:24:35.376489Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2161], cookie=13227821641665062577, session=1) 2026-02-19T08:24:35.376904Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:138:2162], cookie=3639570839223444894, session=0, seqNo=0) 2026-02-19T08:24:35.377062Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:24:35.390806Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:138:2162], cookie=3639570839223444894, session=2) 2026-02-19T08:24:35.392022Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:24:35.392191Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:24:35.392288Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:24:35.404551Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=111) 2026-02-19T08:24:35.404933Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-02-19T08:24:35.405087Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-02-19T08:24:35.405176Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-02-19T08:24:35.418306Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=112) 2026-02-19T08:24:35.418758Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2161], cookie=333, session=1, semaphore="Lock1" count=1) 2026-02-19T08:24:35.419032Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:138:2162], cookie=222, 
session=2, semaphore="Lock1" count=1) 2026-02-19T08:24:35.419154Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-02-19T08:24:35.419266Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:138:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-02-19T08:24:35.432229Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2161], cookie=333) 2026-02-19T08:24:35.432331Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:138:2162], cookie=222) 2026-02-19T08:24:35.432364Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:138:2162], cookie=223) 2026-02-19T08:24:35.432981Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2186], cookie=13407423511456000499, name="Lock1") 2026-02-19T08:24:35.433090Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:164:2186], cookie=13407423511456000499) 2026-02-19T08:24:35.433606Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:167:2189], cookie=4902857684850462003, name="Lock2") 2026-02-19T08:24:35.433691Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:167:2189], cookie=4902857684850462003) 2026-02-19T08:24:35.434183Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2192], cookie=6934650298087010272, name="Lock1") 2026-02-19T08:24:35.434261Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2192], cookie=6934650298087010272) 2026-02-19T08:24:35.434781Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:173:2195], cookie=8703115935462212445, name="Lock2") 2026-02-19T08:24:35.434868Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:173:2195], cookie=8703115935462212445) 2026-02-19T08:24:35.435207Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:138:2162], cookie=444, session=2, semaphore="Lock2" count=1) 2026-02-19T08:24:35.435372Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-02-19T08:24:35.449397Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:138:2162], cookie=444) 2026-02-19T08:24:35.450166Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:178:2200], cookie=10415629558677810471, name="Lock2") 2026-02-19T08:24:35.450275Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:178:2200], cookie=10415629558677810471) 2026-02-19T08:24:35.450853Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute 
(sender=[5:181:2203], cookie=14889013118560723584, name="Lock2") 2026-02-19T08:24:35.450940Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:181:2203], cookie=14889013118560723584) 2026-02-19T08:24:35.467189Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:24:35.467314Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:24:35.467848Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:24:35.468242Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:24:35.519845Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:24:35.520042Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:24:35.520097Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-02-19T08:24:35.520129Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-02-19T08:24:35.520156Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-02-19T08:24:35.520545Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:220:2233], cookie=13247834654189938691, name="Lock1") 2026-02-19T08:24:35.520647Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:220:2233], cookie=13247834654189938691) 2026-02-19T08:24:35.521280Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:228:2240], cookie=7742565289457357501, name="Lock2") 2026-02-19T08:24:35.521355Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:228:2240], cookie=7742565289457357501) |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable >> IncrementalBackup::E2EMultipleBackupRestoreCycles [GOOD] >> IncrementalBackup::DropBackupCollectionSqlPathResolution >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchPEA::ExecuteState_AbortDuringRetry [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:24:25.381688Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:25.480191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:25.500459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:25.500540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:25.501093Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:25.783659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:25.783797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:25.863639Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489462531598 != 1771489462531602 2026-02-19T08:24:25.873053Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:25.920312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:26.055844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:26.477334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:26.477420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:26.477489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:26.478033Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:26.490655Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:27.983301Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.983435Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:27.983783Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:27.983878Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:27.984124Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715661. Resolved key sets: 1 2026-02-19T08:24:27.984302Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.984490Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value0"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '0)) $1) (AsStruct '('"Key" (Uint32 '1)) $1) (AsStruct '('"Key" (Uint32 '2)) $1) (AsStruct '('"Key" (Uint32 '3)) $1) (AsStruct '('"Key" (Uint32 '4)) $1)))) )))) ) 2026-02-19T08:24:27.984559Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:27.984798Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:27.985236Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1234:2785] 2026-02-19T08:24:27.985333Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1234:2785], channels: 0 2026-02-19T08:24:27.985432Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:27.985486Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:27.985530Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1234:2785] 2026-02-19T08:24:27.985579Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. 
Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1234:2785], channels: 0 2026-02-19T08:24:27.985654Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1234:2785], trace_id# 2026-02-19T08:24:27.985733Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1234:2785], trace_id# 2026-02-19T08:24:27.985788Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:27.999056Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1234:2785] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:27.999191Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1234:2785], trace_id# 2026-02-19T08:24:27.999255Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1234:2785], trace_id# 2026-02-19T08:24:28.000469Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1234:2785] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1897 Tasks { TaskId: 1 CpuTimeUs: 1135 FinishTimeMs: 1771489467999 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 43 BuildCpuTimeUs: 1092 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489467986 UpdateTimeMs: 1771489468000 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:28.000559Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5MTNhMjktZTgxYjNmMmEtOTJiOGE0ZTgtNTIyY2VlNDE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1234:2785] 2026-02-19T08:24:28.000646Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1231:2785] TxId: 281474976715661. Ctx: { TraceId: 01khtg07dw41y4vmqezfqakb5h, Database: /Root, SessionId: yd ... Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:34.673613Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1317:3054] TxId: 281474976715665. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:34.673936Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:219} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1317:3054] PartitionIndex# 0 Status# OVERLOADED 2026-02-19T08:24:34.674011Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:242} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Executer retriable error, will retry. PartitionIndex# 0 Status# OVERLOADED Issues# 2026-02-19T08:24:34.674078Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:744} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Scheduling retry for partition. PartitionIndex# 0 OldDelay# 50 NewDelay# 76 OldLimit# 10000 NewLimit# 5000 2026-02-19T08:24:34.675101Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1319:3054] TxId: 281474976715666. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:34.675145Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1319:3054] TxId: 281474976715666. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:34.675209Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1323:3054] TxId: 281474976715668. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-02-19T08:24:34.675237Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1323:3054] TxId: 281474976715668. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:34.675301Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1321:3054] TxId: 281474976715667. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:34.675331Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1321:3054] TxId: 281474976715667. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:34.675426Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:219} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1319:3054] PartitionIndex# 1 Status# OVERLOADED 2026-02-19T08:24:34.675471Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:242} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Executer retriable error, will retry. PartitionIndex# 1 Status# OVERLOADED Issues# 2026-02-19T08:24:34.675510Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:744} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Scheduling retry for partition. PartitionIndex# 1 OldDelay# 50 NewDelay# 106 OldLimit# 10000 NewLimit# 5000 2026-02-19T08:24:34.675782Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:219} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1323:3054] PartitionIndex# 3 Status# OVERLOADED 2026-02-19T08:24:34.675824Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:242} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Executer retriable error, will retry. PartitionIndex# 3 Status# OVERLOADED Issues# 2026-02-19T08:24:34.675863Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:744} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Scheduling retry for partition. PartitionIndex# 3 OldDelay# 50 NewDelay# 117 OldLimit# 10000 NewLimit# 5000 2026-02-19T08:24:34.676068Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:219} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1321:3054] PartitionIndex# 2 Status# OVERLOADED 2026-02-19T08:24:34.676106Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:242} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Executer retriable error, will retry. 
PartitionIndex# 2 Status# OVERLOADED Issues# 2026-02-19T08:24:34.676139Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:744} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Scheduling retry for partition. PartitionIndex# 2 OldDelay# 50 NewDelay# 147 OldLimit# 10000 NewLimit# 5000 2026-02-19T08:24:34.739992Z node 2 :KQP_EXECUTER ERROR: {KQPPEA@kqp_partitioned_executer.cpp:174} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got abort execution. Sender# [0:7307199536658146131:7762515] FromSessionActor# false StatusCode# ABORTED Issues# {
: Error: Test abort during retry } 2026-02-19T08:24:34.740133Z node 2 :KQP_EXECUTER ERROR: {KQPPEA@kqp_partitioned_executer.cpp:853} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: First error occurred. Status# ABORTED Issues# {
: Error: Test abort during retry } 2026-02-19T08:24:34.740217Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:630} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Entering AbortState, trying to finish execution. ActivePartitionsCount# 4 ReturnStatus# ABORTED 2026-02-19T08:24:34.740310Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:262} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Delayed execution timer fired. PartitionIndex# 0 2026-02-19T08:24:34.740391Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:714} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Partition retry cancelled due to AbortState. PartitionIndex# 0 2026-02-19T08:24:34.740477Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 3 TotalPartitions# 4 2026-02-19T08:24:34.740530Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:262} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Delayed execution timer fired. PartitionIndex# 1 2026-02-19T08:24:34.740562Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:714} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Partition retry cancelled due to AbortState. PartitionIndex# 1 2026-02-19T08:24:34.740598Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 2 TotalPartitions# 4 2026-02-19T08:24:34.740632Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:262} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Delayed execution timer fired. PartitionIndex# 3 2026-02-19T08:24:34.740664Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:714} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Partition retry cancelled due to AbortState. PartitionIndex# 3 2026-02-19T08:24:34.740695Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 1 TotalPartitions# 4 2026-02-19T08:24:34.740733Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:262} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Delayed execution timer fired. PartitionIndex# 2 2026-02-19T08:24:34.740769Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:714} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Partition retry cancelled due to AbortState. 
PartitionIndex# 2 2026-02-19T08:24:34.740878Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# ABORTED Issues# [ {
: Error: Test abort during retry } {
: Error: while executing BATCH UPDATE query } ] 2026-02-19T08:24:34.741448Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:34.741877Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:1365:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:1364:3054] trace_id# 2026-02-19T08:24:34.742131Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1365:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:34.742193Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1365:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg0dx44nyn7jfste20zym2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTkxYWRhMTYtNDNkMjg4ZDMtNjE0MTUxOGMtZGYyYjQ5MjU=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> KqpBatchDelete::TableWithIndex [GOOD] >> KqpBatchDelete::TableWithFullTextIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: 2026-02-19T08:24:23.323521Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489309898232508:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:23.323744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:23.562733Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:23.609121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:23.609214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:23.673920Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489309898232470:2081] 1771489463320909 != 1771489463320912 2026-02-19T08:24:23.674727Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:23.684088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:23.788700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:23.809191Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:23.809215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:23.809227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:23.809325Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:26.552765Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006c8/r3tmp/spilling-tmp-runner/node_1_105b5311-5ec9ac5f-ec728642-8320b90c, actor: [1:7608489322783136196:2385] 2026-02-19T08:24:26.552880Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006c8/r3tmp/spilling-tmp-runner 2026-02-19T08:24:28.866878Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.866923Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.866946Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.866964Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.866981Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.867005Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.867028Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.867059Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.867085Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.867108Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.867144Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:28.867188Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg06x31cq9xgkkmsfcd2ak, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWRiNGYyYjUtNWM2ZjljYzgtMzZiOTJjNmEtYTJmMjU3MzE=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:28.867511Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489331373071873:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg06x31cq9xgkkmsfcd2ak, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWRiNGYyYjUtNWM2ZjljYzgtMzZiOTJjNmEtYTJmMjU3MzE=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:28.867557Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489331373071873:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg06x31cq9xgkkmsfcd2ak, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWRiNGYyYjUtNWM2ZjljYzgtMzZiOTJjNmEtYTJmMjU3MzE=, PoolId: default, IsStreamingQuery: 0}. 
Got request, become WaitResolveState trace_id# 2026-02-19T08:24:28.868548Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715672. Resolved key sets: 11 2026-02-19T08:24:28.868765Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.868836Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.868882Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.868939Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.868992Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.869036Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.869088Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.869140Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.869185Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:28.869242Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... # true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:35.238061Z node 2 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. 
Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:35.238081Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [2:7608489362430731891:2546] 2026-02-19T08:24:35.238113Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [2:7608489362430731890:2545] 2026-02-19T08:24:35.238141Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489362430731890:2545], channels: 1 2026-02-19T08:24:35.238156Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489362430731891:2546], channels: 1 2026-02-19T08:24:35.238206Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489362430731890:2545], CA [2:7608489362430731891:2546], trace_id# 2026-02-19T08:24:35.238246Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489362430731890:2545], CA [2:7608489362430731891:2546], trace_id# 2026-02-19T08:24:35.238278Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:35.238364Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489362430731890:2545] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:35.238394Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489362430731890:2545], CA [2:7608489362430731891:2546], trace_id# 2026-02-19T08:24:35.238421Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489362430731890:2545], CA [2:7608489362430731891:2546], trace_id# 2026-02-19T08:24:35.239178Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489362430731891:2546] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:35.239205Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489362430731890:2545], CA [2:7608489362430731891:2546], trace_id# 2026-02-19T08:24:35.239231Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489362430731890:2545], CA [2:7608489362430731891:2546], trace_id# 2026-02-19T08:24:35.240496Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489362430731890:2545] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1582 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 767 FinishTimeMs: 1771489475240 OutputBytes: 1 Tables { TablePath: "/Root/KeyValue" AffectedPartitions: 1 } ComputeCpuTimeUs: 77 BuildCpuTimeUs: 690 WaitOutputTimeUs: 48 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489475239 CreateTimeMs: 1771489475238 UpdateTimeMs: 1771489475240 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:35.240547Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715673. 
Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489362430731890:2545] 2026-02-19T08:24:35.240580Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489362430731891:2546], trace_id# 2026-02-19T08:24:35.240634Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7608489362430731891:2546], trace_id# 2026-02-19T08:24:35.240792Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489362430731891:2546] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 893 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 432 FinishTimeMs: 1771489475240 InputBytes: 1 ComputeCpuTimeUs: 30 BuildCpuTimeUs: 402 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489475240 CreateTimeMs: 1771489475238 UpdateTimeMs: 1771489475240 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:35.240815Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489362430731891:2546] 2026-02-19T08:24:35.240916Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:7608489362430731885:2536] trace_id# 2026-02-19T08:24:35.240972Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002475s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:24:35.242455Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-02-19T08:24:35.242484Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489362430731886:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg0ehs4qmkp48hmk1wez5n, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTlkNGM0NWYtY2JiMmE2NmYtMjAxOGNkYTktZTIzYzdiZjg=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:35.242567Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:219} ActorId: [2:7608489362430731883:2536], ActorState: ExecuteState, Operation: BATCH DELETE, ActivePartitions: 1, Message: Got tx response. Sender# [2:7608489362430731886:2536] PartitionIndex# 0 Status# SUCCESS 2026-02-19T08:24:35.242615Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:683} ActorId: [2:7608489362430731883:2536], ActorState: ExecuteState, Operation: BATCH DELETE, ActivePartitions: 1, Message: Partition finished completely. PartitionIndex# 0 2026-02-19T08:24:35.242655Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [2:7608489362430731883:2536], ActorState: ExecuteState, Operation: BATCH DELETE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# SUCCESS Issues# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithUniqueSyncIndex-EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:24:22.354057Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489308331302909:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:22.360980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:22.409896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:22.705546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:22.705647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:22.728397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:22.777140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:22.830561Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:22.835111Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489308331302873:2081] 1771489462352453 != 1771489462352456 2026-02-19T08:24:22.918943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:22.918974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:22.918981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-02-19T08:24:22.919069Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:22.972765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:25.573679Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006cb/r3tmp/spilling-tmp-runner/node_1_eb331cb3-93a8a430-d3213290-c4ada0ec, actor: [1:7608489321216205615:2310] 2026-02-19T08:24:25.574062Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006cb/r3tmp/spilling-tmp-runner 2026-02-19T08:24:26.367902Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:26.368013Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:26.368504Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:26.368546Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:26.368907Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 2 2026-02-19T08:24:26.369070Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:26.369125Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindSyncIndexTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL, Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:26.369353Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"v2" (Just (String '"456")))) (let $2 '('"v3" (Just (String '"787")))) (let $3 (AsStruct '('"k" (Int32 '1)) '('"v1" (Just (String '"123"))) $1 $2)) (let $4 '('"v3" (Just (String '"789")))) (let $5 (AsStruct '('"k" (Int32 '"2")) '('"v1" (Just (String '"124"))) $1 $4)) (let $6 '('"v2" (Just (String '"457")))) (let $7 (AsStruct '('"k" (Int32 '"3")) '('"v1" (Just (String '"125"))) $6 $2)) (let $8 (AsStruct '('"k" (Int32 '"4")) '('"v1" (Just (String '"126"))) $6 $4)) (let $9 (AsStruct '('"k" (Int32 '"5")) '('"v1" (Just (String '"127"))) '('"v2" (Just (String '"458"))) $2)) (return (Iterator (AsList $3 $5 $7 $8 $9))) )))) ) 2026-02-19T08:24:26.369379Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:26.369636Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1771489466138} 2026-02-19T08:24:26.370476Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489325511173246:2326] 2026-02-19T08:24:26.370513Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489325511173246:2326], channels: 0 2026-02-19T08:24:26.370578Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:26.370612Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. 
Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:26.370848Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489325511173246:2326] 2026-02-19T08:24:26.370881Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489325511173246:2326], channels: 0 2026-02-19T08:24:26.370938Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489325511173246:2326], trace_id# 2026-02-19T08:24:26.370997Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489325511173246:2326], trace_id# 2026-02-19T08:24:26.371031Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:26.401743Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489325511173246:2326] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:26.401820Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489325511173246:2326], trace_id# 2026-02-19T08:24:26.401883Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489325511173243:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg05sgdbe70b4txpy7wnrd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNWIxODktYmIwOGNhMmEtNTVjMDQyNTYtMzNmYmNjMzY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor( ... 
360607250750:2370], trace_id# 2026-02-19T08:24:35.506602Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:7608489360607250754:2374], CA [2:7608489360607250751:2371], CA [2:7608489360607250752:2372], CA [2:7608489360607250753:2373], CA [2:7608489360607250750:2370], trace_id# 2026-02-19T08:24:35.507679Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489360607250751:2371] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 4700 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 4531 FinishTimeMs: 1771489475506 OutputBytes: 1 ComputeCpuTimeUs: 34 BuildCpuTimeUs: 4497 WaitOutputTimeUs: 15 HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489475501 UpdateTimeMs: 1771489475506 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:35.507721Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489360607250751:2371] 2026-02-19T08:24:35.507758Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489360607250754:2374], CA [2:7608489360607250752:2372], CA [2:7608489360607250753:2373], CA [2:7608489360607250750:2370], trace_id# 2026-02-19T08:24:35.507784Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:7608489360607250754:2374], CA [2:7608489360607250752:2372], CA [2:7608489360607250753:2373], CA [2:7608489360607250750:2370], trace_id# 2026-02-19T08:24:35.507905Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489360607250750:2370] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 771 Tasks { TaskId: 1 CpuTimeUs: 406 FinishTimeMs: 1771489475507 EgressBytes: 105 EgressRows: 5 ComputeCpuTimeUs: 17 BuildCpuTimeUs: 389 HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489475500 UpdateTimeMs: 1771489475507 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:35.507939Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489360607250750:2370] 2026-02-19T08:24:35.507975Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489360607250754:2374], CA [2:7608489360607250752:2372], CA [2:7608489360607250753:2373], trace_id# 2026-02-19T08:24:35.507995Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:7608489360607250754:2374], CA [2:7608489360607250752:2372], CA [2:7608489360607250753:2373], trace_id# 2026-02-19T08:24:35.508078Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489360607250752:2372] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 5690 DurationUs: 2000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 712 FinishTimeMs: 1771489475507 InputBytes: 1 ComputeCpuTimeUs: 25 BuildCpuTimeUs: 687 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489475505 CreateTimeMs: 1771489475502 UpdateTimeMs: 1771489475507 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:35.508092Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489360607250752:2372] 2026-02-19T08:24:35.508113Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489360607250754:2374], CA [2:7608489360607250753:2373], trace_id# 2026-02-19T08:24:35.508138Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. 
Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489360607250754:2374], CA [2:7608489360607250753:2373], trace_id# 2026-02-19T08:24:35.508489Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489360607250753:2373] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 3697 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 1467 FinishTimeMs: 1771489475507 OutputRows: 5 OutputBytes: 52 ComputeCpuTimeUs: 92 BuildCpuTimeUs: 1375 WaitOutputTimeUs: 23 HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489475503 UpdateTimeMs: 1771489475507 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:35.508523Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489360607250753:2373] 2026-02-19T08:24:35.508562Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489360607250754:2374], trace_id# 2026-02-19T08:24:35.508609Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7608489360607250754:2374], trace_id# 2026-02-19T08:24:35.508867Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489360607250754:2374] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2073 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 1661 FinishTimeMs: 1771489475508 InputRows: 5 InputBytes: 52 EgressBytes: 80 EgressRows: 5 ComputeCpuTimeUs: 58 BuildCpuTimeUs: 1603 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489475507 CreateTimeMs: 1771489475505 UpdateTimeMs: 1771489475508 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:35.508902Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [2:7608489360607250754:2374] 2026-02-19T08:24:35.508953Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:7608489360607250724:2327] trace_id# 2026-02-19T08:24:35.508994Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.016931s ReadRows: 0 ReadBytes: 0 RequestUnits# 11 ForceFlag# true trace_id# 2026-02-19T08:24:35.513768Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:35.513798Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489360607250747:2327] TxId: 281474976710663. Ctx: { TraceId: 01khtg0e333cgmxpk2ckwgpr2p, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZjYzN2ZhMTEtYzMxYWI1YjYtMzQ1ZGRhOWMtOWNkNzkwNzA=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental >> TSchemeShardUserAttrsTest::MkDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeTimeDifference [GOOD] Test command err: 2026-02-19T08:23:24.975501Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489057987301645:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:24.976240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:25.369133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:23:25.460863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:25.460995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:25.492952Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:25.495625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489057987301615:2081] 1771489404972805 != 1771489404972808 2026-02-19T08:23:25.516940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-02-19T08:23:25.635974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:25.661851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:25.661878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:25.661887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:25.661994Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:23:25.953368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:25.990594Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; issue_log { id: "0" status: GREY message: "Database does not exist" } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-02-19T08:23:38.387563Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:38.388195Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:23:38.389353Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:38.400307Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:38.410338Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:38.412644Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:38.413116Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:38.413345Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:38.414909Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:38.415017Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:38.830389Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:38.948883Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:38.949012Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:38.949612Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:38.949667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:39.000658Z node 2 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:23:39.003274Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:39.003969Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:39.096630Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:39.110599Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:40.754959Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:40.755032Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:40.755079Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:40.755541Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 2 host: "::1" port: 12001 pile { name: "pile0" } } ... 
waiting for SysViewsRoster update finished 2026-02-19T08:23:49.713653Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:49.714632Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:49.733446Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:49.734908Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:49.735359Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:49.735490Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:49.737082Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:49.737490Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:49.737596Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:50.152932Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:50.304237Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:50.304407Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:50.304998Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:50.305093Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:50.382469Z node 4 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-02-19T08:23:50.383063Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:50.383595Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:50.490159Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:23:50.524876Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:51.624924Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:23:51.624983Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:23:51.625019Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:23:51.625513Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: MAINTENANCE ... 
snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:03.189196Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:03.278279Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:03.278447Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:03.338936Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:03.454724Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:04.285781Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:04.285859Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:04.285908Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:04.286487Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:04.347299Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-02-19T08:24:15.959520Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:15.966297Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:15.969144Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:455:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:15.969425Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:15.969601Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:16.338968Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:16.429906Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:16.430059Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:16.465474Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:16.537977Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:17.434231Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:17.434302Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:17.434349Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:17.434801Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:33.625207Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:33.625691Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:33.634502Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:33.635047Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:33.681395Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:24:33.681484Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:24:33.763598Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:33.763670Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:33.763716Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-02-19T08:24:33.764787Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:1223:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:33.766200Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:33.767105Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:33.775529Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:1227:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:33.775755Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:1233:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:33.776208Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:33.776281Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:33.776594Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:33.776640Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:33.778409Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [16:1238:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:33.778745Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:33.779054Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:34.171271Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:34.171463Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:34.193825Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:34.213157Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:34.221311Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:34.221427Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:34.222566Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:34.222663Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:34.228593Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:34.228701Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:34.244469Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:160:2075] 1771489458655738 != 1771489458655742 2026-02-19T08:24:34.244694Z node 15 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [15:119:2075] 1771489458651601 != 1771489458651605 2026-02-19T08:24:34.245320Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:78:2075] 1771489458646511 != 1771489458646515 2026-02-19T08:24:34.280739Z node 13 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2026-02-19T08:24:34.281272Z node 13 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-02-19T08:24:34.281588Z node 13 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2026-02-19T08:24:34.281883Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:34.282382Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:34.282534Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:34.282654Z node 13 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected >> KqpBatchPEA::ExecuteState_MinKeyErrorIssues [GOOD] >> KqpBatchPEA::ExecuteState_UnknownEvent >> GenericFederatedQuery::ClickHouseManagedSelectAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: 2026-02-19T08:24:30.230450Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489342956432237:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:30.230510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:30.267877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:30.543994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:30.544128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:30.549433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:30.582352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:30.629917Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:30.639347Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489342956432097:2081] 1771489470218858 != 1771489470218861 2026-02-19T08:24:30.700522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:30.700547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:30.700584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:30.700697Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:30.762157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:33.515307Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006ba/r3tmp/spilling-tmp-runner/node_1_8bd65319-5f3c350a-7451304f-4eeeb1c4, actor: [1:7608489355841335832:2385] 2026-02-19T08:24:33.515455Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006ba/r3tmp/spilling-tmp-runner 2026-02-19T08:24:35.737760Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.737820Z node 1 :KQP_EXECUTER DEBUG: 
kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.737848Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.737880Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.737906Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.737938Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.737994Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.738035Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.738075Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.738112Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.738152Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.738218Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0dph36pq03mwmsrpg2cd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDM4YWQ5NDQtN2VjMzVhNTgtYmYwNzliZGQtNzJlZmFiMWM=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:35.738396Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489364431271504:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg0dph36pq03mwmsrpg2cd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDM4YWQ5NDQtN2VjMzVhNTgtYmYwNzliZGQtNzJlZmFiMWM=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:35.738434Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489364431271504:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg0dph36pq03mwmsrpg2cd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDM4YWQ5NDQtN2VjMzVhNTgtYmYwNzliZGQtNzJlZmFiMWM=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:35.738738Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:35.738891Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.738988Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.739060Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.739106Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.739164Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.739221Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.739265Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.739307Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.739371Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.739451Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [Ow ... compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:36.189923Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:36.189936Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [1:7608489368726239036:2549] 2026-02-19T08:24:36.189954Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [1:7608489368726239035:2548] 2026-02-19T08:24:36.189972Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710674. 
Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489368726239035:2548], channels: 1 2026-02-19T08:24:36.189986Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489368726239036:2549], channels: 1 2026-02-19T08:24:36.190019Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489368726239035:2548], CA [1:7608489368726239036:2549], trace_id# 2026-02-19T08:24:36.190045Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489368726239035:2548], CA [1:7608489368726239036:2549], trace_id# 2026-02-19T08:24:36.190066Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:36.190143Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489368726239035:2548] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:36.190170Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489368726239035:2548], CA [1:7608489368726239036:2549], trace_id# 2026-02-19T08:24:36.190202Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489368726239035:2548], CA [1:7608489368726239036:2549], trace_id# 2026-02-19T08:24:36.190231Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489368726239036:2549] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:36.190253Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489368726239035:2548], CA [1:7608489368726239036:2549], trace_id# 2026-02-19T08:24:36.190273Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489368726239035:2548], CA [1:7608489368726239036:2549], trace_id# 2026-02-19T08:24:36.191375Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489368726239035:2548] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1145 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 189 FinishTimeMs: 1771489476191 OutputBytes: 1 Tables { TablePath: "/Root/KeyValue" AffectedPartitions: 1 } ComputeCpuTimeUs: 57 BuildCpuTimeUs: 132 WaitOutputTimeUs: 33 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489476190 CreateTimeMs: 1771489476189 UpdateTimeMs: 1771489476191 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:36.191425Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489368726239035:2548] 2026-02-19T08:24:36.191454Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489368726239036:2549], trace_id# 2026-02-19T08:24:36.191478Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. 
Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489368726239036:2549], trace_id# 2026-02-19T08:24:36.191559Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489368726239036:2549] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 485 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 90 FinishTimeMs: 1771489476191 InputBytes: 1 ComputeCpuTimeUs: 34 BuildCpuTimeUs: 56 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489476190 CreateTimeMs: 1771489476190 UpdateTimeMs: 1771489476191 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:36.191581Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489368726239036:2549] 2026-02-19T08:24:36.191656Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:7608489368726239030:2536] trace_id# 2026-02-19T08:24:36.191692Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.001630s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:24:36.192442Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:36.192467Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489368726239031:2536] TxId: 281474976710674. Ctx: { TraceId: 01khtg0f7r0wjmgstabdxqr4s2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyMDNkZTgtYmQyMWJmMmQtNzcxYzA4MmMtNTMxN2ZhMTQ=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:36.192510Z node 1 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:219} ActorId: [1:7608489368726239017:2536], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Got tx response. 
Sender# [1:7608489368726239031:2536] PartitionIndex# 0 Status# SUCCESS 2026-02-19T08:24:36.192546Z node 1 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:683} ActorId: [1:7608489368726239017:2536], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Partition finished completely. PartitionIndex# 0 2026-02-19T08:24:36.192595Z node 1 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [1:7608489368726239017:2536], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# SUCCESS Issues# >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy >> KqpBatchDelete::DeleteOn [GOOD] >> KqpBatchDelete::ColumnTable >> GenericFederatedQuery::IcebergHadoopTokenSelectAll >> GenericFederatedQuery::IcebergHadoopSaSelectAll >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 >> TSchemeShardUserAttrsTest::MkDir [GOOD] >> KqpBatchUpdate::UnknownColumn [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectAll >> GenericFederatedQuery::IcebergHiveSaSelectAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:24:38.328444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:24:38.328541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:38.328606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:24:38.328665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:24:38.328704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:24:38.328747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:24:38.328812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:24:38.328848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:24:38.328924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-02-19T08:24:38.329005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:24:38.330161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:24:38.330475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:24:38.425433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:38.425488Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:38.443877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:24:38.444721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:24:38.444925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:24:38.456126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:24:38.456780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:24:38.457330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:38.457591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:24:38.460219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:38.460370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:24:38.461354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:38.461398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:38.461501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:24:38.461548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:38.461579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:24:38.461766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:24:38.613024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.614976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.615082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.615153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.615238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:38.615299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
StateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:39.360005Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:39.360195Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 166us result status StatusSuccess 2026-02-19T08:24:39.360562Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:39.361104Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:39.361231Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 157us result status StatusSuccess 2026-02-19T08:24:39.361541Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:39.362152Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:39.362385Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 155us result status StatusSuccess 2026-02-19T08:24:39.362711Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:39.363234Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:39.363418Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 156us result status StatusSuccess 2026-02-19T08:24:39.363782Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> GenericFederatedQuery::YdbManagedSelectAll >> GenericFederatedQuery::IcebergHiveBasicSelectAll >> TxUsage::WriteToTopic_Demo_13_Table [GOOD] >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore [GOOD] >> IncrementalBackup::OmitIndexesIncrementalBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UnknownColumn [GOOD] Test command err: 2026-02-19T08:24:33.202402Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489355763535352:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:33.202450Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:33.568599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:33.568670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:33.592316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:33.611637Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489355763535304:2081] 1771489473197552 != 1771489473197555 2026-02-19T08:24:33.639643Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:33.647247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:33.790160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:33.790187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:33.790197Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:33.790265Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:33.893351Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:36.472068Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006b3/r3tmp/spilling-tmp-runner/node_1_c8528a7b-7ed0aa78-7addbd75-f5098ae4, actor: [1:7608489368648439034:2385] 2026-02-19T08:24:36.472242Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006b3/r3tmp/spilling-tmp-runner 2026-02-19T08:24:38.799824Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.799888Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.799919Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.799949Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.799976Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.800014Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.800051Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.800086Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.800122Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.800162Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.800202Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.800261Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:38.801970Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:38.802019Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. 
Got request, become WaitResolveState trace_id# 2026-02-19T08:24:38.802385Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715672. Resolved key sets: 11 2026-02-19T08:24:38.802538Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802590Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802637Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802683Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802738Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802784Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802830Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802880Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802930Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.802980Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... 608489377238374715:2532], CA [1:7608489377238374716:2533], CA [1:7608489377238374712:2529], CA [1:7608489377238374713:2530], trace_id# 2026-02-19T08:24:38.849608Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. 
Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [1:7608489377238374714:2531], CA [1:7608489377238374715:2532], CA [1:7608489377238374716:2533], CA [1:7608489377238374712:2529], CA [1:7608489377238374713:2530], trace_id# 2026-02-19T08:24:38.849672Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489377238374712:2529] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 843 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 510 FinishTimeMs: 1771489478847 EgressBytes: 495 EgressRows: 9 ComputeCpuTimeUs: 13 BuildCpuTimeUs: 497 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489478811 UpdateTimeMs: 1771489478847 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:38.849684Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489377238374712:2529] 2026-02-19T08:24:38.849727Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489377238374714:2531], CA [1:7608489377238374715:2532], CA [1:7608489377238374716:2533], CA [1:7608489377238374713:2530], trace_id# 2026-02-19T08:24:38.849754Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489377238374714:2531], CA [1:7608489377238374715:2532], CA [1:7608489377238374716:2533], CA [1:7608489377238374713:2530], trace_id# 2026-02-19T08:24:38.849848Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489377238374713:2530] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1677 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 442 FinishTimeMs: 1771489478847 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 16 BuildCpuTimeUs: 426 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489478812 UpdateTimeMs: 1771489478847 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:38.849866Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. 
Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489377238374713:2530] 2026-02-19T08:24:38.849895Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489377238374714:2531], CA [1:7608489377238374715:2532], CA [1:7608489377238374716:2533], trace_id# 2026-02-19T08:24:38.849916Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489377238374714:2531], CA [1:7608489377238374715:2532], CA [1:7608489377238374716:2533], trace_id# 2026-02-19T08:24:38.849988Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489377238374714:2531] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 5258 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 4758 FinishTimeMs: 1771489478847 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 18 BuildCpuTimeUs: 4740 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489478814 UpdateTimeMs: 1771489478847 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:38.850004Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489377238374714:2531] 2026-02-19T08:24:38.850026Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489377238374715:2532], CA [1:7608489377238374716:2533], trace_id# 2026-02-19T08:24:38.850042Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489377238374715:2532], CA [1:7608489377238374716:2533], trace_id# 2026-02-19T08:24:38.850105Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. 
Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489377238374715:2532] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1450 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 988 FinishTimeMs: 1771489478848 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 16 BuildCpuTimeUs: 972 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489478819 UpdateTimeMs: 1771489478848 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:38.850118Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489377238374715:2532] 2026-02-19T08:24:38.850134Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489377238374716:2533], trace_id# 2026-02-19T08:24:38.850155Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489377238374716:2533], trace_id# 2026-02-19T08:24:38.850246Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489377238374716:2533] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2351 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1747 FinishTimeMs: 1771489478848 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 31 BuildCpuTimeUs: 1716 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489478820 UpdateTimeMs: 1771489478848 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:24:38.850264Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489377238374716:2533] 2026-02-19T08:24:38.850316Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. 
Send Commit to BufferActor buffer_actor_id# [1:7608489377238374702:2401] trace_id# 2026-02-19T08:24:38.850357Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.017526s ReadRows: 0 ReadBytes: 0 RequestUnits# 11 ForceFlag# true trace_id# 2026-02-19T08:24:38.886359Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:38.886398Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489377238374703:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0ghzff6p815tprn5pztz, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2NTU3NmYtOWRmNGU0OC1kNzEyOTU0MS1hZTFhMGM2OQ==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain >> TxUsage::WriteToTopic_Demo_13_Query >> KqpBatchDelete::TableWithFullTextIndex [GOOD] >> IncrementalBackup::DropBackupCollectionSqlPathResolution [GOOD] >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames |93.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |93.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |93.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> GenericFederatedQuery::IcebergHiveTokenSelectAll >> BasicUsage::KeyedWriteSession_NoAutoPartitioning_BoundPartitionChooser [GOOD] >> BasicUsage::KeyedWriteSession_EventLoop_Acks >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 >> TSchemeShardUserAttrsTest::VariousUse >> KqpBatchUpdate::TableWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithFullTextIndex [GOOD] Test command err: 2026-02-19T08:24:28.640921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489333724386890:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:28.640991Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:28.853541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:28.882817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:28.882938Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:28.932045Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:28.934413Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489333724386859:2081] 1771489468639196 != 1771489468639199 2026-02-19T08:24:28.942636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:29.060124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:29.060162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:29.060167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:29.060227Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:29.146674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:31.828463Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006bd/r3tmp/spilling-tmp-runner/node_1_b1449cdf-57cc474e-3aacc57a-45f08cda, actor: [1:7608489346609290598:2385] 2026-02-19T08:24:31.828632Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006bd/r3tmp/spilling-tmp-runner 2026-02-19T08:24:33.967333Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967398Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967432Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967457Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967480Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967514Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967551Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967588Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967621Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967654Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967689Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:33.967743Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01khtg0bzr8hzx2gvjejv479c4, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2ViNTUyYjYtZmZlYTFmZDktODA0MTdiMDItYTk0YmViZDU=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:33.973842Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489355199226271:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg0bzr8hzx2gvjejv479c4, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2ViNTUyYjYtZmZlYTFmZDktODA0MTdiMDItYTk0YmViZDU=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:33.973909Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489355199226271:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg0bzr8hzx2gvjejv479c4, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2ViNTUyYjYtZmZlYTFmZDktODA0MTdiMDItYTk0YmViZDU=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:33.975406Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:33.975572Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975617Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975658Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975701Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975749Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975789Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975835Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975883Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975933Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:33.975976Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... 08489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:36.588486Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:36.588636Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710688. Resolved key sets: 0 2026-02-19T08:24:36.588679Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (OptionalType (StructType '('Count0 (DataType 'Uint64))))) (return (lambda '() (Iterator (AsList (AsStruct '('"column0" (Coalesce (Member %kqp%tx_result_binding_0_0 'Count0) (Uint64 '0)))))))) ) 2026-02-19T08:24:36.588700Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:36.588717Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:24:36.588775Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:36.588996Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489368084129440:2601] 2026-02-19T08:24:36.589038Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710688. 
Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489368084129440:2601], channels: 1 2026-02-19T08:24:36.589088Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:36.589110Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:36.589129Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489368084129440:2601] 2026-02-19T08:24:36.589170Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489368084129440:2601], channels: 1 2026-02-19T08:24:36.589204Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489368084129440:2601], trace_id# 2026-02-19T08:24:36.589239Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489368084129440:2601], trace_id# 2026-02-19T08:24:36.589265Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:36.589964Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. 
Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489368084129440:2601] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:36.589996Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489368084129440:2601], trace_id# 2026-02-19T08:24:36.590023Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489368084129440:2601], trace_id# 2026-02-19T08:24:36.590299Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489368084129440:2601] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 655 Tasks { TaskId: 1 CpuTimeUs: 474 FinishTimeMs: 1771489476590 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 45 BuildCpuTimeUs: 429 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489476589 UpdateTimeMs: 1771489476590 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:36.590330Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489368084129440:2601] 2026-02-19T08:24:36.590429Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:36.590455Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:36.590485Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489368084129437:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0fn16rnex4mkg9rgzgja, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4MGU0YzAtMzA2MzcyNzItMWZjOTE1M2YtZWQ2YWJhMzg=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000655s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:24:37.507829Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489369088143896:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:37.507882Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:37.524645Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:37.598372Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:37.601571Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489369088143865:2081] 1771489477506591 != 1771489477506594 2026-02-19T08:24:37.611695Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:37.611776Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:37.635416Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:37.678224Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:37.678249Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:37.678256Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:37.678319Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:37.828841Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:40.843485Z node 2 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006bd/r3tmp/spilling-tmp-runner/node_2_1c8d2e3d-b1dd15b1-3fe80cda-18168e40, actor: [2:7608489381973046594:2310] 2026-02-19T08:24:40.843691Z node 2 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006bd/r3tmp/spilling-tmp-runner >> KqpBatchPEA::ExecuteState_UnknownEvent [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> TSchemeShardUserAttrsTest::VariousUse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithIndex [GOOD] Test command err: 2026-02-19T08:24:34.531455Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489356072562473:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:34.533032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:34.576622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:34.849090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:34.849177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:34.878006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:34.884088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:34.925884Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:34.929802Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489356072562437:2081] 1771489474528004 != 1771489474528007 2026-02-19T08:24:35.012530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:35.012554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:35.012562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:35.012650Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:35.115473Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:37.449113Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006b2/r3tmp/spilling-tmp-runner/node_1_a8cc545-42daf76d-6ee655c4-ca6d6a0c, actor: [1:7608489368957466156:2384] 2026-02-19T08:24:37.449249Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006b2/r3tmp/spilling-tmp-runner 2026-02-19T08:24:39.730891Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.730968Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731022Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731050Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731076Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731112Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731171Z node 1 :KQP_EXECUTER DEBUG: 
kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731207Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731238Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731280Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731316Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:39.731379Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0hg9f091328ez6vm83z6, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGM4OTBmZjItYmZmNjYyNzYtYzFjNTM0ZGEtZDk5ZDFlZTA=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:39.732756Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489377547401825:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg0hg9f091328ez6vm83z6, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGM4OTBmZjItYmZmNjYyNzYtYzFjNTM0ZGEtZDk5ZDFlZTA=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:39.732820Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489377547401825:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg0hg9f091328ez6vm83z6, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGM4OTBmZjItYmZmNjYyNzYtYzFjNTM0ZGEtZDk5ZDFlZTA=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:39.733205Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:39.733357Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733414Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733456Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733498Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733546Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733599Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733647Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733692Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733754Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:39.733798Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [Own ... TablePath: "/Root/TimestampTable" ReadRows: 10 ReadBytes: 80 AffectedPartitions: 1 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpTableExtraStats" value: "\n\013\n\t\256\200\204\200\200\200\204\200\001" } } ComputeCpuTimeUs: 111 BuildCpuTimeUs: 555 WaitOutputTimeUs: 370 WaitInputTimeUs: 1252 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489482561 CreateTimeMs: 1771489482559 UpdateTimeMs: 1771489482562 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:42.563108Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710687. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489390432304969:2702] 2026-02-19T08:24:42.563190Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489390432304964:2601] TxId: 281474976710687. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:42.563208Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489390432304964:2601] TxId: 281474976710687. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:42.563230Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489390432304964:2601] TxId: 281474976710687. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.004500s ReadRows: 20 ReadBytes: 160 RequestUnits# 20 ForceFlag# true trace_id# 2026-02-19T08:24:42.563384Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:42.563417Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:42.563592Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:42.563611Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:42.563736Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710688. Resolved key sets: 0 2026-02-19T08:24:42.563776Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (OptionalType (StructType '('Count0 (DataType 'Uint64))))) (return (lambda '() (Iterator (AsList (AsStruct '('"column0" (Coalesce (Member %kqp%tx_result_binding_0_0 'Count0) (Uint64 '0)))))))) ) 2026-02-19T08:24:42.563788Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:42.563803Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:24:42.563844Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1771489482161} 2026-02-19T08:24:42.564086Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489390432304977:2601] 2026-02-19T08:24:42.564134Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489390432304977:2601], channels: 1 2026-02-19T08:24:42.564183Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. 
Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:42.564206Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:42.564221Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489390432304977:2601] 2026-02-19T08:24:42.564249Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489390432304977:2601], channels: 1 2026-02-19T08:24:42.564317Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489390432304977:2601], trace_id# 2026-02-19T08:24:42.564356Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489390432304977:2601], trace_id# 2026-02-19T08:24:42.564381Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:42.565109Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489390432304977:2601] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:42.565143Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. 
Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489390432304977:2601], trace_id# 2026-02-19T08:24:42.565187Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489390432304977:2601], trace_id# 2026-02-19T08:24:42.565505Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489390432304977:2601] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 697 Tasks { TaskId: 1 CpuTimeUs: 499 FinishTimeMs: 1771489482565 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 34 BuildCpuTimeUs: 465 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489482564 UpdateTimeMs: 1771489482565 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:42.565540Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489390432304977:2601] 2026-02-19T08:24:42.565617Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:42.565639Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:42.565660Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489390432304974:2601] TxId: 281474976710688. Ctx: { TraceId: 01khtg0ncd9ynpknzrdna4txms, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiYWM3NWQtZjg0YTBlN2UtNGExMDA5NGQtN2EyMDg0MzM=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000697s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchPEA::ExecuteState_UnknownEvent [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:24:34.309355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:34.417469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:34.433018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:34.433074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:34.433500Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:34.700546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:34.700686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:34.775028Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489471644923 != 1771489471644927 2026-02-19T08:24:34.783978Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:34.829782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:34.962827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:35.301567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:35.301627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:35.301679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:35.302117Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:35.314416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:36.574848Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:36.574958Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:36.575205Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:24:36.575283Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:36.575515Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715661. Resolved key sets: 1 2026-02-19T08:24:36.575676Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:36.575848Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value0"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '0)) $1) (AsStruct '('"Key" (Uint32 '1)) $1) (AsStruct '('"Key" (Uint32 '2)) $1) (AsStruct '('"Key" (Uint32 '3)) $1) (AsStruct '('"Key" (Uint32 '4)) $1)))) )))) ) 2026-02-19T08:24:36.575896Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:24:36.576111Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:36.576498Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1225:2780] 2026-02-19T08:24:36.576556Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1225:2780], channels: 0 2026-02-19T08:24:36.576657Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:36.576716Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:36.576781Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1225:2780] 2026-02-19T08:24:36.576844Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. 
Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1225:2780], channels: 0 2026-02-19T08:24:36.576920Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1225:2780], trace_id# 2026-02-19T08:24:36.576979Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1225:2780], trace_id# 2026-02-19T08:24:36.577035Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:36.589171Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1225:2780] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:36.589292Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1225:2780], trace_id# 2026-02-19T08:24:36.589360Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1225:2780], trace_id# 2026-02-19T08:24:36.590455Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1225:2780] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1789 Tasks { TaskId: 1 CpuTimeUs: 1103 FinishTimeMs: 1771489476590 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 31 BuildCpuTimeUs: 1072 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489476577 UpdateTimeMs: 1771489476590 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:36.590530Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIzN2M2NmUtZjZiMzQyYjgtZWQ2Mzk3M2UtOTY5NTM1NGY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1225:2780] 2026-02-19T08:24:36.590627Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg0ftkdrb5xsjha4evendy, Database: /Root, SessionId: yd ... 6Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:43.195371Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:43.195452Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1314:3054] trace_id# 2026-02-19T08:24:43.195500Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:43.195529Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:43.195592Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:43.195629Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:43.195695Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1314:3054] trace_id# 2026-02-19T08:24:43.195736Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:43.195765Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:43.195881Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1323:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:43.195923Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1323:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:24:43.196000Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1323:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1314:3054] trace_id# 2026-02-19T08:24:43.196046Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1323:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:43.196076Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1323:3054] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:43.196181Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1319:3054] PartitionIndex# 1 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:24:43.196253Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 3 TotalPartitions# 4 2026-02-19T08:24:43.196405Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Got tx response. Sender# [2:1321:3054] PartitionIndex# 2 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:24:43.196444Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 2 TotalPartitions# 4 2026-02-19T08:24:43.196525Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Got tx response. Sender# [2:1317:3054] PartitionIndex# 0 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:24:43.196976Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 1 TotalPartitions# 4 2026-02-19T08:24:43.197109Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Got tx response. Sender# [2:1323:3054] PartitionIndex# 3 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:24:43.197200Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# INTERNAL_ERROR Issues# [ {
: Error: Got an unknown event in ExecuteState, ActorId = [2:69:2116], EventType = 65538 } {
: Error: while executing BATCH UPDATE query } ] 2026-02-19T08:24:43.197738Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:43.198145Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:1325:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:1324:3054] trace_id# 2026-02-19T08:24:43.198336Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1325:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:43.198387Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1325:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg0p9z4d8mnj4ebac251cf, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTY1ZTNhYjctMjZhYzA0MDItZGFhMWZkZDAtNTdiNGEzMw==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:24:43.557909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:24:43.557993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:43.558051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:24:43.558096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:24:43.558145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:24:43.558180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:24:43.558208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:24:43.558233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:24:43.558320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:43.558389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:24:43.559254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:24:43.559520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:24:43.648139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:43.648197Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:43.664514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:24:43.670303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:24:43.670548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:24:43.684608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:24:43.685330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:24:43.686147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:43.686529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:24:43.689811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:43.690043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:24:43.691337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:43.691397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:43.691535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:24:43.691594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:43.691639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:24:43.691846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:24:43.850496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.851492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.851610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.851710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.851844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.851911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.852001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.852099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.852195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.852280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.852354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.852413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.852481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:43.852545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
at schemeshard: 72057594046678944, cookie: 112 2026-02-19T08:24:44.611776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:44.611812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:44.611975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:24:44.612032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:24:44.612160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:44.612193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-02-19T08:24:44.612225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 112, path id: 39 2026-02-19T08:24:44.612247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 112, path id: 40 FAKE_COORDINATOR: Erasing txId 112 2026-02-19T08:24:44.612985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2026-02-19T08:24:44.613071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2026-02-19T08:24:44.613110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2026-02-19T08:24:44.613157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:24:44.613212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:24:44.613601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2026-02-19T08:24:44.613659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 
72057594046678944, cookie: 112 2026-02-19T08:24:44.613685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2026-02-19T08:24:44.613732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-02-19T08:24:44.613759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:24:44.614422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-02-19T08:24:44.614503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-02-19T08:24:44.614530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2026-02-19T08:24:44.614554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-02-19T08:24:44.614593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:24:44.614664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2026-02-19T08:24:44.615145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:24:44.615195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:24:44.615275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:24:44.617473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-02-19T08:24:44.618248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-02-19T08:24:44.619134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-02-19T08:24:44.619219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, 
wait until txId: 112 TestWaitNotification wait txId: 112 2026-02-19T08:24:44.619581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-02-19T08:24:44.619654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-02-19T08:24:44.620247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-02-19T08:24:44.620338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-02-19T08:24:44.620372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:895:2883] TestWaitNotification: OK eventTxId 112 2026-02-19T08:24:44.621083Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:44.621261Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 196us result status StatusSuccess 2026-02-19T08:24:44.621618Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000044 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2026-02-19T08:24:44.624065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 38 PathVersion: 8 } ApplyIf { PathId: 39 PathVersion: 7 } ApplyIf { PathId: 40 
PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:24:44.624231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2026-02-19T08:24:44.624351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:24:44.626545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:44.626769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> TFlatTest::AutoSplitMergeQueue [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario >> KqpBatchDelete::ColumnTable [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query >> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2026-02-19T08:23:13.082355Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489012366726420:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:13.085015Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:13.131809Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:13.438473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:13.438587Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:13.441526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:13.525279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.583768Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:13.585650Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489012366726283:2081] 1771489393042624 != 1771489393042627 2026-02-19T08:23:13.751877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:13.825597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:13.849622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:23:13.939315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:23:14.090231Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; A-0 B-0 2026-02-19T08:23:14.704969Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.9, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.051s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-02-19T08:23:14.717952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2026-02-19T08:23:14.742115Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.028s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-02-19T08:23:14.747068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2026-02-19T08:23:14.821789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-02-19T08:23:14.821945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 
data size 6291502 row count 1 2026-02-19T08:23:14.821990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2026-02-19T08:23:14.822088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-02-19T08:23:14.824832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2026-02-19T08:23:15.062920Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.14, eph 2} end=Done, 2 blobs 1r (max 1), put Spent{time=0.027s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-02-19T08:23:15.126095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2026-02-19T08:23:15.222559Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 3 blobs 2r (max 2), put Spent{time=0.112s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583142 0 0)b }, ecr=1.000 2026-02-19T08:23:15.227132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-02-19T08:23:15.227221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 12583004 row count 2 2026-02-19T08:23:15.227258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583004 2026-02-19T08:23:15.227356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 1, Rows# 2, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:23:15.227430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 599 seconds 2026-02-19T08:23:15.227469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-02-19T08:23:15.227707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-02-19T08:23:15.290040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:113: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, shardIdx# 72057594046644480:1 in# 62 ms, with status# 1, next wakeup in# 599.937292s, rate# 5.787037037e-06, in 
queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-02-19T08:23:15.333830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583020 rowCount 2 cpuUsage 0 2026-02-19T08:23:15.433995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-02-19T08:23:15.434098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 12583020 row count 2 2026-02-19T08:23:15.434142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583020 2026-02-19T08:23:15.434217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-02-19T08:23:15.434627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2026-02-19T08:23:15.606159Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 3} end=Done, 2 blobs 1r (max 1), put Spent{time=0.084s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-02-19T08:23:15.725852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874522 rowCount 3 cpuUsage 0 2026-02-19T08:23:15.752505Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 3r (max 3), put Spent{time=0.098s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874688 0 0)b }, ecr=1.000 2026-02-19T08:23:15.793178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874538 rowCount 3 cpuUsage 0 2026-02-19T08:23:15.823443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-02-19T08:23:15.823572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 18874538 row count 3 2026-02-19T08:23:15.823618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874538 2026-02-19T08:23:15.823706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-02-19T08:23:15.830103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 
72057594046644480, queue size# 0 A-2 2026-02-19T08:23:15.986064Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 4} end=Done, 2 blobs 1r (max 1), put Spent{time=0.067s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-02-19T08:23:16.111403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 720575940 ... EraseKVRow: 177 WriteKVRow: 208 EraseKVRow: 178 WriteKVRow: 209 EraseKVRow: 179 WriteKVRow: 210 EraseKVRow: 180 WriteKVRow: 211 EraseKVRow: 181 2026-02-19T08:24:26.268392Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found WriteKVRow: 212 EraseKVRow: 182 2026-02-19T08:24:26.395904Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2026-02-19T08:24:26.413552Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037896.1.1546, eph 412} end=Term, 1 blobs 0r (max 201), put Spent{time=0.094s,wait=0.000s,interrupts=0} 2026-02-19T08:24:26.413641Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037896:1:1562} Compact 1147 on TGenCompactionParams{1001: gen 1 epoch 0, 2 parts} step 1546, product {0 parts epoch 0} thrown WriteKVRow: 213 EraseKVRow: 183 2026-02-19T08:24:26.547709Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found WriteKVRow: 214 EraseKVRow: 184 2026-02-19T08:24:26.790137Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037899 not found 2026-02-19T08:24:26.790174Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037898 not found 2026-02-19T08:24:26.790192Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found WriteKVRow: 215 EraseKVRow: 185 2026-02-19T08:24:26.941180Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037902 not found 2026-02-19T08:24:26.943022Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037901.1.33, eph 418} end=Term, 1 blobs 0r (max 114), put Spent{time=0.100s,wait=0.000s,interrupts=0} 2026-02-19T08:24:26.943172Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037901:1:49} Compact 21 on TGenCompactionParams{1001: gen 1 epoch 0, 2 parts} step 33, product {0 parts epoch 0} thrown 2026-02-19T08:24:26.944054Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found WriteKVRow: 216 EraseKVRow: 186 2026-02-19T08:24:27.062779Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037901 not found WriteKVRow: 217 EraseKVRow: 187 WriteKVRow: 218 2026-02-19T08:24:27.181638Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037905 not found EraseKVRow: 188 WriteKVRow: 219 EraseKVRow: 189 2026-02-19T08:24:27.289620Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037904 not found WriteKVRow: 220 
EraseKVRow: 190 WriteKVRow: 221 EraseKVRow: 191 WriteKVRow: 222 EraseKVRow: 192 WriteKVRow: 223 EraseKVRow: 193 WriteKVRow: 224 2026-02-19T08:24:27.504149Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037903 not found 2026-02-19T08:24:27.504220Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037900 not found EraseKVRow: 194 WriteKVRow: 225 EraseKVRow: 195 2026-02-19T08:24:27.585785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037907 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715708:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037907 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\313\000\000\000" } SchemeshardId: 72057594046644480 WriteKVRow: 226 EraseKVRow: 196 WriteKVRow: 227 2026-02-19T08:24:27.673888Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037912.1.8, eph 440} end=Term, 0 blobs 0r (max 45), put Spent{time=0.003s,wait=0.000s,interrupts=0} 2026-02-19T08:24:27.675103Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037912:1:13} Compact 3 on TGenCompactionParams{1001: gen 255 epoch 0, 2 parts} step 8, product {0 parts epoch 0} thrown EraseKVRow: 197 WriteKVRow: 228 2026-02-19T08:24:27.696990Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037907 not found EraseKVRow: 198 WriteKVRow: 229 EraseKVRow: 199 WriteKVRow: 230 EraseKVRow: 200 WriteKVRow: 231 EraseKVRow: 201 WriteKVRow: 232 EraseKVRow: 202 WriteKVRow: 233 EraseKVRow: 203 WriteKVRow: 234 EraseKVRow: 204 WriteKVRow: 235 EraseKVRow: 205 WriteKVRow: 236 EraseKVRow: 206 WriteKVRow: 237 EraseKVRow: 207 WriteKVRow: 238 EraseKVRow: 208 WriteKVRow: 239 EraseKVRow: 209 WriteKVRow: 240 EraseKVRow: 210 2026-02-19T08:24:28.280595Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037908 not found 2026-02-19T08:24:28.280673Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037910 not found 2026-02-19T08:24:28.297013Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037906 not found 2026-02-19T08:24:28.297072Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037909 not found WriteKVRow: 241 EraseKVRow: 211 2026-02-19T08:24:28.351198Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037912 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715713:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037912 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\334\000\000\000" } SchemeshardId: 72057594046644480 2026-02-19T08:24:28.365040Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037912.1.111, eph 464} end=Term, 0 blobs 0r (max 41), put Spent{time=0.018s,wait=0.000s,interrupts=0} 2026-02-19T08:24:28.365312Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037912:1:126} Compact 81 on 
TGenCompactionParams{1001: gen 1 epoch 0, 2 parts} step 111, product {0 parts epoch 0} thrown 2026-02-19T08:24:28.367295Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037914 not found 2026-02-19T08:24:28.367334Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037913 not found WriteKVRow: 242 EraseKVRow: 212 WriteKVRow: 243 2026-02-19T08:24:28.431845Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037912 not found 2026-02-19T08:24:28.453123Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037916 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715715:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037916 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\323\000\000\000" } SchemeshardId: 72057594046644480 EraseKVRow: 213 WriteKVRow: 244 EraseKVRow: 214 2026-02-19T08:24:28.553330Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037916 not found WriteKVRow: 245 EraseKVRow: 215 WriteKVRow: 246 2026-02-19T08:24:28.593493Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037917 not found EraseKVRow: 216 WriteKVRow: 247 EraseKVRow: 217 WriteKVRow: 248 EraseKVRow: 218 WriteKVRow: 249 EraseKVRow: 219 WriteKVRow: 250 EraseKVRow: 220 WriteKVRow: 251 EraseKVRow: 221 WriteKVRow: 252 EraseKVRow: 222 WriteKVRow: 253 EraseKVRow: 223 WriteKVRow: 254 EraseKVRow: 224 WriteKVRow: 255 EraseKVRow: 225 WriteKVRow: 256 EraseKVRow: 226 WriteKVRow: 257 EraseKVRow: 227 WriteKVRow: 258 EraseKVRow: 228 WriteKVRow: 259 EraseKVRow: 229 WriteKVRow: 260 EraseKVRow: 230 WriteKVRow: 261 EraseKVRow: 231 WriteKVRow: 262 EraseKVRow: 232 WriteKVRow: 263 EraseKVRow: 233 WriteKVRow: 264 EraseKVRow: 234 WriteKVRow: 265 EraseKVRow: 235 WriteKVRow: 266 EraseKVRow: 236 WriteKVRow: 267 EraseKVRow: 237 WriteKVRow: 268 EraseKVRow: 238 WriteKVRow: 269 EraseKVRow: 239 WriteKVRow: 270 EraseKVRow: 240 WriteKVRow: 271 EraseKVRow: 241 WriteKVRow: 272 EraseKVRow: 242 WriteKVRow: 273 EraseKVRow: 243 WriteKVRow: 274 EraseKVRow: 244 WriteKVRow: 275 EraseKVRow: 245 WriteKVRow: 276 EraseKVRow: 246 WriteKVRow: 277 EraseKVRow: 247 WriteKVRow: 278 EraseKVRow: 248 WriteKVRow: 279 EraseKVRow: 249 WriteKVRow: 280 EraseKVRow: 250 WriteKVRow: 281 EraseKVRow: 251 WriteKVRow: 282 EraseKVRow: 252 WriteKVRow: 283 EraseKVRow: 253 WriteKVRow: 284 EraseKVRow: 254 WriteKVRow: 285 EraseKVRow: 255 WriteKVRow: 286 EraseKVRow: 256 WriteKVRow: 287 EraseKVRow: 257 WriteKVRow: 288 EraseKVRow: 258 WriteKVRow: 289 EraseKVRow: 259 WriteKVRow: 290 EraseKVRow: 260 WriteKVRow: 291 EraseKVRow: 261 WriteKVRow: 292 EraseKVRow: 262 WriteKVRow: 293 EraseKVRow: 263 WriteKVRow: 294 EraseKVRow: 264 WriteKVRow: 295 EraseKVRow: 265 WriteKVRow: 296 EraseKVRow: 266 WriteKVRow: 297 EraseKVRow: 267 WriteKVRow: 298 EraseKVRow: 268 WriteKVRow: 299 EraseKVRow: 269 EraseKVRow: 270 EraseKVRow: 271 EraseKVRow: 272 EraseKVRow: 273 EraseKVRow: 274 EraseKVRow: 275 EraseKVRow: 276 EraseKVRow: 277 EraseKVRow: 278 EraseKVRow: 279 EraseKVRow: 280 EraseKVRow: 281 EraseKVRow: 282 EraseKVRow: 283 EraseKVRow: 284 
EraseKVRow: 285 EraseKVRow: 286 EraseKVRow: 287 EraseKVRow: 288 EraseKVRow: 289 EraseKVRow: 290 EraseKVRow: 291 EraseKVRow: 292 EraseKVRow: 293 EraseKVRow: 294 EraseKVRow: 295 EraseKVRow: 296 EraseKVRow: 297 EraseKVRow: 298 EraseKVRow: 299 2026-02-19T08:24:43.798719Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037915 not found 2026-02-19T08:24:43.798788Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037911 not found 2026-02-19T08:24:43.798821Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037918 not found 2026-02-19T08:24:44.684020Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037920 not found 2026-02-19T08:24:44.684124Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037921 not found 2026-02-19T08:24:44.684162Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2026-02-19T08:24:44.684212Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037922 not found 2026-02-19T08:24:44.768545Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037923 not found 2026-02-19T08:24:44.768588Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037924 not found |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ColumnTable [GOOD] Test command err: 2026-02-19T08:24:32.372466Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489350108137192:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:32.372956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:32.712657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:32.712888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:32.775835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:32.798781Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489350108137151:2081] 1771489472367545 != 1771489472367548 2026-02-19T08:24:32.811023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-02-19T08:24:32.813421Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:32.897534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:32.897570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:32.897579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:32.897663Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:32.968136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:35.585279Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006b5/r3tmp/spilling-tmp-runner/node_1_3f934da5-4f928d95-b408a94c-72b40e60, actor: [1:7608489362993040872:2385] 2026-02-19T08:24:35.585465Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006b5/r3tmp/spilling-tmp-runner 2026-02-19T08:24:37.830570Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830630Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830660Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830686Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830716Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830761Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830810Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830849Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830885Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830928Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.830981Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:37.831055Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0fn22g0mjaj9va2sc6ma, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2FjYTk1ZjAtZTAyNjlkYzQtMjIwNzc4ZDAtZjY0YmE3YWM=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:37.831315Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489371582976546:2401] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg0fn22g0mjaj9va2sc6ma, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2FjYTk1ZjAtZTAyNjlkYzQtMjIwNzc4ZDAtZjY0YmE3YWM=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:37.831356Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489371582976546:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg0fn22g0mjaj9va2sc6ma, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2FjYTk1ZjAtZTAyNjlkYzQtMjIwNzc4ZDAtZjY0YmE3YWM=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:37.833794Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:37.833971Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834031Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834087Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834188Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834268Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834320Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834494Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834555Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834614Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:37.834666Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... cuter tasks construction 2026-02-19T08:24:44.974840Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489399665676550:2326] TxId: 281474976715661. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# PrepareState ComputeActor# [2:7608489399665676683:2804] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 11463 Tasks { TaskId: 1 CpuTimeUs: 467 FinishTimeMs: 1771489484974 Tables { TablePath: "/Root/TestOlap" WriteRows: 3 WriteBytes: 704 AffectedPartitions: 3 } EgressBytes: 960 EgressRows: 3 ComputeCpuTimeUs: 23 BuildCpuTimeUs: 444 HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489484935 UpdateTimeMs: 1771489484974 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpTaskExtraStats" value: "\022\000" } } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:44.974938Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489399665676683:2804] 2026-02-19T08:24:44.975142Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:254} ActorId: [2:7608489399665676550:2326] TxId: 281474976715661. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Send Flush to BufferActor buffer_actor_id# [2:7608489399665676549:2326] trace_id# 2026-02-19T08:24:44.975183Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489399665676550:2326] TxId: 281474976715661. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.011463s ReadRows: 0 ReadBytes: 0 RequestUnits# 7 ForceFlag# true trace_id# 2026-02-19T08:24:44.975347Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489399665676550:2326] TxId: 281474976715661. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:44.975378Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489399665676550:2326] TxId: 281474976715661. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. 
Terminate, become ZombieState trace_id# 2026-02-19T08:24:44.975575Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:44.975859Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [2:7608489399665676718:2326] TxId: 281474976715662. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:44.976017Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715662. Resolved key sets: 0 2026-02-19T08:24:44.976081Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715662. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:24:44.976135Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:7608489399665676718:2326] TxId: 281474976715662. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:44.976157Z node 2 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [2:7608489399665676718:2326] TxId: 281474976715662. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:44.976212Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [2:7608489399665676718:2326] TxId: 281474976715662. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:7608489399665676549:2326] trace_id# 2026-02-19T08:24:44.976258Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489399665676718:2326] TxId: 281474976715662. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:24:44.986686Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489399665676718:2326] TxId: 281474976715662. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-02-19T08:24:44.986725Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489399665676718:2326] TxId: 281474976715662. Ctx: { TraceId: 01khtg0r1tb9b5zeafrj0dpknj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:45.286435Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:45.286501Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-02-19T08:24:45.286543Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:24:45.286583Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Nothing (OptionalType (DataType 'Int32))) $1) '((Just (Int32 '"3")) $1))))))))))) )))) ) trace_id# 2026-02-19T08:24:45.286613Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:24:45.288236Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-02-19T08:24:45.304106Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:45.304195Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0r698ndaeh4kpbhgdgqp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Begin literal execution operation_timeout# 7199.697448s cancel_after# trace_id# 2026-02-19T08:24:45.304220Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0r698ndaeh4kpbhgdgqp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:24:45.304249Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0r698ndaeh4kpbhgdgqp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. 
Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Nothing (OptionalType (DataType 'Int32))) $1) '((Just (Int32 '"3")) $1))))))))))) )))) ) trace_id# 2026-02-19T08:24:45.304282Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:24:45.304992Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0r698ndaeh4kpbhgdgqp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-02-19T08:24:45.305335Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0r698ndaeh4kpbhgdgqp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:45.305582Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:7608489403960644054:2326] TxId: 281474976715663. Ctx: { TraceId: 01khtg0r698ndaeh4kpbhgdgqp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:7608489403960644053:2326] trace_id# 2026-02-19T08:24:45.305742Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489403960644054:2326] TxId: 281474976715663. Ctx: { TraceId: 01khtg0r698ndaeh4kpbhgdgqp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:45.305763Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489403960644054:2326] TxId: 281474976715663. Ctx: { TraceId: 01khtg0r698ndaeh4kpbhgdgqp, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIzNjcyYjAtZGY5Yzk1MjEtYzZlZWZjM2MtZGRkY2I4NTY=, PoolId: default, IsStreamingQuery: 0}. 
Terminate, become ZombieState trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:24:46.150740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:24:46.150827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:46.150887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:24:46.150939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:24:46.150993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:24:46.151029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:24:46.151072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:24:46.151100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:24:46.151201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:46.151274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:24:46.152082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:24:46.152382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:24:46.242874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:46.242956Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:46.254980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:24:46.255567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:24:46.255779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:24:46.267512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:24:46.268236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:24:46.268939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:46.269266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:24:46.272516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:46.272730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:24:46.273986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:46.274044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:46.274164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:24:46.274216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:46.274258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:24:46.274428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:24:46.419553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.420545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.420656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.420789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.420864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.420926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.421021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.421094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.421203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.421279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.421360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.421438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.421507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:46.421575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... alue: "ValA2" } } ApplyIf { PathId: 38 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:24:47.227529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_user_attrs.cpp:26: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:24:47.227647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-02-19T08:24:47.227692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2026-02-19T08:24:47.227814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:24:47.227869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:24:47.232455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:47.232748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2026-02-19T08:24:47.232998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 
72057594046678944 2026-02-19T08:24:47.233047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:24:47.233114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-02-19T08:24:47.233251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:24:47.238906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-02-19T08:24:47.239065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-02-19T08:24:47.239514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:47.239645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:47.239701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2026-02-19T08:24:47.239931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:24:47.239973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:24:47.240016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:24:47.240051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:24:47.240111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:24:47.240183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:24:47.240248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:24:47.240306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:24:47.240364Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:24:47.240401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:24:47.240477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:24:47.240519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-02-19T08:24:47.240561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-02-19T08:24:47.243294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:47.243362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:24:47.243599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:47.243648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 FAKE_COORDINATOR: Erasing txId 103 2026-02-19T08:24:47.244284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:24:47.244390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:24:47.244445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:24:47.244537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:24:47.244589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:24:47.244712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:24:47.247048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:24:47.247380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:24:47.247450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:24:47.247944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:24:47.248044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:24:47.248084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:749:2737] TestWaitNotification: OK eventTxId 103 2026-02-19T08:24:47.248649Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:47.248877Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 219us result status StatusSuccess 2026-02-19T08:24:47.249322Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames [GOOD] >> IncrementalBackup::IncrementalBackupNonExistentTable >> TxUsage::WriteToTopic_Demo_43_Table [GOOD] >> TxUsage::WriteToTopic_Demo_24_Table >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> 
GenericFederatedQuery::IcebergHadoopTokenSelectConstant >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] >> EntityId::CheckId [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath >> EntityId::Order >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:24:48.445377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:24:48.445473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:48.445551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:24:48.445605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:24:48.445645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:24:48.445682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:24:48.445799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:24:48.445830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:24:48.445917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:48.446004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:24:48.446906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:24:48.447225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:24:48.534707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:48.534779Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:48.549623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:24:48.550363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:24:48.550557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:24:48.563179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:24:48.563957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:24:48.564817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:48.565205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:24:48.568437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:48.568629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:24:48.569884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:48.569949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:48.570076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:24:48.570118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:48.570157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:24:48.570318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:24:48.698748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.699708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.699811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.699897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:48.700713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
8:24:49.327574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:24:49.327647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:24:49.327695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:24:49.327723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:24:49.327750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:24:49.327823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:24:49.330259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-02-19T08:24:49.330417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2026-02-19T08:24:49.330908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:49.331015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:49.331082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:35: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038, at schemeshard: 72057594046678944 2026-02-19T08:24:49.331233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> Done 2026-02-19T08:24:49.331375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:24:49.331432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:24:49.332209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:24:49.332347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:24:49.333837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:49.333884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:49.334036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:24:49.334144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:49.334189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:24:49.334234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:24:49.334491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:24:49.334529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:24:49.334615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:24:49.334646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:24:49.334681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:24:49.334714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:24:49.334760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:24:49.334816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:24:49.334868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:24:49.334895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:24:49.334957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:24:49.334991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:24:49.335023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 
72057594046678944, LocalPathId: 1], 8 2026-02-19T08:24:49.335047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:24:49.335726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:24:49.335823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:24:49.335860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:24:49.335911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:24:49.335953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:24:49.336640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:24:49.336725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:24:49.336756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:24:49.336787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:24:49.336821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:24:49.336888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:24:49.340332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:24:49.340961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2026-02-19T08:24:49.343083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: 
"__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:24:49.343333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:24:49.343464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2026-02-19T08:24:49.345285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:49.345498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] >> GenericFederatedQuery::TestConnectorNotConfigured >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount >> GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2026-02-19T08:22:48.065689Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488903431450850:2196];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:48.076313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:48.669634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:48.722458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:48.722551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:48.752643Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:48.783786Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:3850) connection closed with error: Connection refused 2026-02-19T08:22:48.790465Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:22:48.807102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:48.963841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:49.048247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:53.237836Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:53.238035Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:22:53.260725Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:53.269969Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608488921334692585:2081] 1771489372885453 != 1771489372885456 2026-02-19T08:22:53.287291Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:53.287391Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:53.356206Z node 2 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:11925) connection closed with error: Connection refused 2026-02-19T08:22:53.366789Z node 2 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:22:53.394068Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:53.505790Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:53.943666Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:57.599111Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608488941948456732:2224];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:57.599274Z 
node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:57.659356Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:22:57.807627Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:57.807726Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:57.834873Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:57.842208Z node 3 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:2658) connection closed with error: Connection refused 2026-02-19T08:22:57.847651Z node 3 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:22:57.854831Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:57.886619Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:22:58.622034Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:02.288826Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:02.289619Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608488963849333734:2237];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:02.307504Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:02.353812Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:02.481649Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608488963849333507:2081] 1771489382228265 != 1771489382228268 2026-02-19T08:23:02.547115Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:02.547224Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:02.547957Z node 4 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:11870) connection closed with error: Connection refused 2026-02-19T08:23:02.552699Z node 4 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:23:02.554036Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:02.567364Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:02.601790Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:03.055855Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:03.261994Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:06.961133Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608488980762739189:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:06.961418Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:06.987145Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:23:07.123589Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:07.144842Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:07.144926Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:07.147686Z node 5 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:24205) connection closed with error: Connection refused 2026-02-19T08:23:07.151154Z node 5 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:23:07.152738Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:07.181816Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:23:07.965883Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:23:11.189240Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7608489003543487121:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:23:11.189394Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:23:11.219655Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-02-19T08:23:11.319299Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:11.320621Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:11.320707Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:11.351409Z node 6 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#32,[::1]:4285) connection closed with error: Connection refused 2026-02-19T08:23:11.353477Z node 6 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:23:11.361175Z node 6 :HIVE WA ... 7371 != 1771489455457374 2026-02-19T08:24:15.590307Z node 18 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:62363) connection closed with error: Connection refused 2026-02-19T08:24:15.591420Z node 18 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:24:15.592578Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:15.592663Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:15.601360Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:15.639998Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:24:16.463468Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:20.905444Z node 19 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7608489299670331478:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:20.905516Z node 19 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:20.927981Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:24:21.034239Z node 19 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:21.036197Z node 19 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [19:7608489299670331447:2081] 1771489460904526 != 1771489460904529 2026-02-19T08:24:21.052787Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:21.052891Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:21.080868Z node 19 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:24496) connection closed with error: Connection refused 2026-02-19T08:24:21.084098Z node 19 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection 
refused 2026-02-19T08:24:21.084757Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:21.091989Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:24:21.914202Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:26.727529Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7608489322672280107:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:26.728083Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:26.761501Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:24:26.874207Z node 20 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:26.898438Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:26.898542Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:26.926469Z node 20 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#32,[::1]:24722) connection closed with error: Connection refused 2026-02-19T08:24:26.928235Z node 20 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:24:26.939814Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:26.967264Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:24:27.736894Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:32.572379Z node 21 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7608489349238518050:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:32.572493Z node 21 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:32.607813Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:32.669806Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:24:32.804435Z node 
21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:32.804539Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:32.808688Z node 21 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:32.812123Z node 21 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [21:7608489349238517927:2081] 1771489472566038 != 1771489472566041 2026-02-19T08:24:32.846357Z node 21 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#34,[::1]:28702) connection closed with error: Connection refused 2026-02-19T08:24:32.850789Z node 21 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:24:32.854110Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:32.866946Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:24:33.592991Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:38.365298Z node 22 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7608489375251936033:2173];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:38.365607Z node 22 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:38.388973Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:24:38.493007Z node 22 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:38.494434Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:38.494526Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:38.531786Z node 22 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#36,[::1]:17128) connection closed with error: Connection refused 2026-02-19T08:24:38.537452Z node 22 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:24:38.540950Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:38.640738Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:24:39.370853Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:44.232443Z node 23 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7608489402387873622:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:44.232497Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:44.272421Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:24:44.389484Z node 23 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:44.393973Z node 23 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [23:7608489402387873592:2081] 1771489484229420 != 1771489484229423 2026-02-19T08:24:44.407519Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:44.407636Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:44.421226Z node 23 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#38,[::1]:31338) connection closed with error: Connection refused 2026-02-19T08:24:44.430047Z node 23 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:24:44.436891Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:44.515022Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:24:45.237863Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> TxUsage::WriteToTopic_Demo_13_Query [GOOD] >> TSchemeShardUserAttrsTest::Boot >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:24:50.827255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:24:50.827343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:50.827405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:24:50.827456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:24:50.827501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:24:50.827555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:24:50.827589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:24:50.827618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:24:50.827691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:50.827786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:24:50.828605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:24:50.828904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:24:50.909196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:50.909240Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:50.918738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:24:50.919423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:24:50.919608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:24:50.928920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:24:50.929646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:24:50.930402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:50.930768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:24:50.934034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:50.934237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:24:50.935408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-02-19T08:24:50.935465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:50.935614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:24:50.935666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:50.935712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:24:50.935896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:24:51.093548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.094520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.094621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.094699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.094865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.094927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.095013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.095098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.095189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.095259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.095322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.095407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:51.095475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:24:51.095533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... Operation and all the parts is done, operation id: 105:0 2026-02-19T08:24:51.715216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 105:0 2026-02-19T08:24:51.715268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:24:51.715299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2026-02-19T08:24:51.715339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:24:51.715368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2026-02-19T08:24:51.716759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:24:51.717681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:24:51.718575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:51.718623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:51.718752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:24:51.718897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:51.718933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-02-19T08:24:51.718966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 105, path id: 40 FAKE_COORDINATOR: Erasing txId 105 2026-02-19T08:24:51.719578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:24:51.719660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:24:51.719688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:24:51.719743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:24:51.719788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-02-19T08:24:51.720119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:24:51.720198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:24:51.720226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:24:51.720247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-02-19T08:24:51.720270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:24:51.720340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-02-19T08:24:51.720555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:24:51.720608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:24:51.720712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:24:51.723249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:24:51.723769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:24:51.723876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-02-19T08:24:51.724172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-02-19T08:24:51.724210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-02-19T08:24:51.724623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-02-19T08:24:51.724709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:24:51.724738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:801:2789] TestWaitNotification: OK eventTxId 105 2026-02-19T08:24:51.725287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:51.725487Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 181us result status StatusPathDoesNotExist 2026-02-19T08:24:51.725649Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:24:51.726178Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:51.726338Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 157us result status StatusSuccess 2026-02-19T08:24:51.726791Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" 
ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DstCreator::WithSyncIndexAndIntermediateDir >> DstCreator::ExistingDst >> DstCreator::NonExistentSrc >> TxUsage::WriteToTopic_Demo_14_Table >> IncrementalBackup::OmitIndexesIncrementalBackup [GOOD] >> IncrementalBackup::MultipleIndexesIncrementalRestore >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount >> TSchemeShardUserAttrsTest::Boot [GOOD] >> DstCreator::WithSyncIndex >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount >> IncrementalBackup::IncrementalBackupNonExistentTable [GOOD] >> IncrementalBackup::IncrementalBackupWithIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:24:52.682763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:24:52.682859Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:52.682916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:24:52.682949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:24:52.682982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:24:52.683017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:24:52.683069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:24:52.683105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:24:52.683176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:52.683247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:24:52.684053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:24:52.684337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:24:52.774321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:52.774387Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:52.786969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:24:52.787644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:24:52.787833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:24:52.798794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:24:52.799500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:24:52.800222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:52.800541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:24:52.803624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2026-02-19T08:24:52.803814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:24:52.804982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:52.805035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:52.805149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:24:52.805190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:52.805227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:24:52.805413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:24:52.958176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:24:52.959501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.959974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.960045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:52.960105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 710672 2026-02-19T08:24:53.550729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2026-02-19T08:24:53.550842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2026-02-19T08:24:53.550887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2026-02-19T08:24:53.558104Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-02-19T08:24:53.567871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:24:53.568099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:24:53.568338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-02-19T08:24:53.568399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:24:53.568590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:24:53.568688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:53.572292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:53.572490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2026-02-19T08:24:53.572679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:24:53.572738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:24:53.572784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:24:53.572826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:24:53.575002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:24:53.575059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:24:53.575125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:24:53.577328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:24:53.577389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:24:53.577435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:24:53.577497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:24:53.577642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:24:53.579359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:24:53.579512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:24:53.579846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:53.580003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:24:53.580055Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:24:53.580375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:24:53.580439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:24:53.580617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:24:53.580690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:24:53.582727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:53.582781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:53.582979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:53.583020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:24:53.583415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:24:53.583477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:24:53.583571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:24:53.583615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:24:53.583657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:24:53.583692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:24:53.583728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:24:53.583767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:24:53.583799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:24:53.583829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:24:53.583905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 
2026-02-19T08:24:53.583949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:24:53.583984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:24:53.584546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:24:53.584670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:24:53.584715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:24:53.584756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:24:53.584795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:24:53.584882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:24:53.587895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:24:53.588373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 >> DstCreator::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:24:52.990021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:24:52.990103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:52.990164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:24:52.990207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:24:52.990239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:24:52.990300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:24:52.990335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:24:52.990381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:24:52.990448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:24:52.990530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:24:52.991275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:24:52.991568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:24:53.075189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:53.075247Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:53.086761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:24:53.087359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:24:53.087533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:24:53.098171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:24:53.098889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:24:53.099527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:24:53.099789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:24:53.102971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:53.103141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:24:53.104181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:24:53.104240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:24:53.104338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-02-19T08:24:53.104383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:24:53.104417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:24:53.104576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:24:53.251990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.252992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.253934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:24:53.254016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 2057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-02-19T08:24:53.914915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-02-19T08:24:53.915030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 Forgetting tablet 72075186233409548 2026-02-19T08:24:53.916512Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-02-19T08:24:53.916594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:24:53.916696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2026-02-19T08:24:53.917298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:24:53.917402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 Forgetting tablet 72075186233409549 2026-02-19T08:24:53.918050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:24:53.918089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:24:53.918186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:24:53.918881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:24:53.918926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:24:53.918971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:24:53.920251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:24:53.920283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-02-19T08:24:53.920423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6237: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2026-02-19T08:24:53.920989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:24:53.921024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:24:53.921094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:24:53.921105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:24:53.922886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:24:53.922912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-02-19T08:24:53.922972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6237: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2026-02-19T08:24:53.923033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:24:53.923053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:24:53.923088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:24:53.923108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:24:53.923172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:24:53.923216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 100 2026-02-19T08:24:53.923412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:24:53.923451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-02-19T08:24:53.923518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:24:53.923531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:24:53.923803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:24:53.923860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:24:53.923882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:1062:2975] 2026-02-19T08:24:53.924003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:24:53.924040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:24:53.924052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:1062:2975] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-02-19T08:24:53.924331Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:53.924462Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 135us result status StatusPathDoesNotExist 2026-02-19T08:24:53.924574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:24:53.924857Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:24:53.924956Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 113us result status StatusSuccess 2026-02-19T08:24:53.925233Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 11 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 
SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental [GOOD] >> IncrementalBackup::BasicIndexIncrementalRestore >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount >> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectCount >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> GenericFederatedQuery::IcebergHadoopSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> KqpBatchUpdate::SimpleOnePartition [GOOD] >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> DstCreator::SameOwner >> DstCreator::WithIntermediateDir >> DstCreator::WithSyncIndex [GOOD] >> GenericFederatedQuery::YdbSelectCount [GOOD] >> GenericFederatedQuery::YdbFilterPushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2026-02-19T08:24:53.004785Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489438338029954:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:53.005381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:53.190772Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-02-19T08:24:53.207481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:53.207610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:53.267064Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:53.271366Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489434043062604:2081] 1771489492999817 != 1771489492999820 2026-02-19T08:24:53.284301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:53.464318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:53.521003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:53.521052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:53.521066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:53.521179Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:53.745476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:53.756033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:54.008173Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:54.108249Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:54.108278Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:54.108686Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:55.897254Z node 1 
:REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489494054, tx_id: 281474976710658 } } } 2026-02-19T08:24:55.897792Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:24:55.900266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.903479Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-02-19T08:24:55.903505Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-02-19T08:24:55.943546Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-02-19T08:24:55.978831Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 42 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489495979 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 
110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 43 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 
LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: ... ImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW 
} IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 44 PathOwnerId: 72057594046644480 } 2026-02-19T08:24:55.988284Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 44] Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 44 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489495979 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 
1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] Test command err: 2026-02-19T08:24:18.281437Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489287557771714:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:18.282838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:18.478784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:18.506659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:18.506760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:18.549101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:18.575713Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:18.576880Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489287557771683:2081] 1771489458279569 != 1771489458279572 2026-02-19T08:24:18.660500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:18.660525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:18.660533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:18.660616Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:18.726216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:21.287681Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006cf/r3tmp/spilling-tmp-runner/node_1_e5687ca7-beb693f3-16291dbc-270cb92b, actor: [1:7608489300442675411:2384] 2026-02-19T08:24:21.287841Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006cf/r3tmp/spilling-tmp-runner 2026-02-19T08:24:23.454549Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454608Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454641Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454667Z node 1 :KQP_EXECUTER DEBUG: 
kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454692Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454733Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454765Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454795Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454828Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454861Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454902Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:23.454962Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg01pp2xh4n03ea4jy3xqy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ4ZTU4NzItM2JkYjMwODQtZGYwYjQ5MjEtN2FjYjBmYjE=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:23.455264Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489309032611080:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg01pp2xh4n03ea4jy3xqy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ4ZTU4NzItM2JkYjMwODQtZGYwYjQ5MjEtN2FjYjBmYjE=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:23.455302Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489309032611080:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg01pp2xh4n03ea4jy3xqy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ4ZTU4NzItM2JkYjMwODQtZGYwYjQ5MjEtN2FjYjBmYjE=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:23.455627Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:23.455796Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.455854Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.455892Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.455932Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.455979Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.456019Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.456056Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.456130Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.456203Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:23.456263Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... 2618] 2026-02-19T08:24:55.658455Z node 4 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:7608489447220309882:2618], channels: 1 2026-02-19T08:24:55.658477Z node 4 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:7608489447220309883:2619], channels: 2 2026-02-19T08:24:55.658523Z node 4 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. 
Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:24:55.658539Z node 4 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:24:55.658553Z node 4 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [4:7608489447220309883:2619] 2026-02-19T08:24:55.658572Z node 4 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [4:7608489447220309882:2618] 2026-02-19T08:24:55.658596Z node 4 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:7608489447220309882:2618], channels: 1 2026-02-19T08:24:55.658617Z node 4 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:7608489447220309883:2619], channels: 2 2026-02-19T08:24:55.658654Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489447220309882:2618], CA [4:7608489447220309883:2619], trace_id# 2026-02-19T08:24:55.658683Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:7608489447220309882:2618], CA [4:7608489447220309883:2619], trace_id# 2026-02-19T08:24:55.658704Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:24:55.659637Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489447220309882:2618] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:55.659668Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489447220309882:2618], CA [4:7608489447220309883:2619], trace_id# 2026-02-19T08:24:55.659697Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:7608489447220309882:2618], CA [4:7608489447220309883:2619], trace_id# 2026-02-19T08:24:55.659727Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489447220309883:2619] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:24:55.659748Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489447220309882:2618], CA [4:7608489447220309883:2619], trace_id# 2026-02-19T08:24:55.659769Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:7608489447220309882:2618], CA [4:7608489447220309883:2619], trace_id# 2026-02-19T08:24:55.661681Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489447220309883:2619] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 915 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 226 FinishTimeMs: 1771489495661 InputBytes: 1 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 93 BuildCpuTimeUs: 133 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771489495661 CreateTimeMs: 1771489495659 UpdateTimeMs: 1771489495661 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:55.661734Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489447220309883:2619] 2026-02-19T08:24:55.661765Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489447220309882:2618], trace_id# 2026-02-19T08:24:55.661788Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:7608489447220309882:2618], trace_id# 2026-02-19T08:24:55.661921Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489447220309882:2618] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1221 Tasks { TaskId: 1 CpuTimeUs: 788 FinishTimeMs: 1771489495661 OutputBytes: 1 Tables { TablePath: "/Root/Test" ReadRows: 2 ReadBytes: 22 AffectedPartitions: 1 } IngressRows: 2 ComputeCpuTimeUs: 79 BuildCpuTimeUs: 709 WaitOutputTimeUs: 20 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771489495661 CreateTimeMs: 1771489495658 UpdateTimeMs: 1771489495661 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:24:55.661962Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489447220309882:2618] 2026-02-19T08:24:55.662054Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:24:55.662072Z node 4 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. 
Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:24:55.662098Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [4:7608489447220309876:2536] TxId: 281474976710687. Ctx: { TraceId: 01khtg124w0r94nx0fs3x6g38e, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDQzZWE5NGUtZGUyYzM5OGUtYjMyOTlhZDYtZmIyNjIzNWM=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002136s ReadRows: 2 ReadBytes: 22 RequestUnits# 2 ForceFlag# true trace_id# >> GenericFederatedQuery::IcebergHiveSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query [GOOD] >> TxUsage::WriteToTopic_Demo_43_Query >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn >> ColumnShardTiers::TieringUsage [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2026-02-19T08:24:54.118143Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489444552478555:2146];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:54.118260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:54.308033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:54.308123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:54.343916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:54.347426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:54.388711Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:54.400118Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489444552478446:2081] 1771489494101897 != 1771489494101900 2026-02-19T08:24:54.524578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:54.625372Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:54.625442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:54.625467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:54.625556Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:54.835213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:54.843900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.123092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:55.134341Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:55.134359Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:55.134871Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:57.041187Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489495125, tx_id: 281474976710658 } } } 2026-02-19T08:24:57.041684Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:24:57.043569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:57.045836Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-02-19T08:24:57.045885Z node 1 :REPLICATION_CONTROLLER DEBUG: 
dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-02-19T08:24:57.084844Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-02-19T08:24:57.119614Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 41 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489497120 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } 
ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 Pl ... 
ndexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } 
DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 43 PathOwnerId: 72057594046644480 } 2026-02-19T08:24:57.133214Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 43] Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489497120 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 
ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table >> BasicUsage::KeyedWriteSession_EventLoop_Acks [GOOD] >> BasicUsage::KeyedWriteSession_MultiThreadedWrite_Acks >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestConnectorNotConfigured 
[GOOD] Test command err: 2026-02-19T08:24:39.569159Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489379590800125:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:39.574312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:39.853925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:39.854032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:39.875115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:39.891977Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:39.892810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:39.893766Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489379590800084:2081] 1771489479563941 != 1771489479563944 2026-02-19T08:24:39.981056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:39.981085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:39.981098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:39.981188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:40.117316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:40.338546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:40.338907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:40.340340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:40.341057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:40.343570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489480390, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:40.345578Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:40.345652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-02-19T08:24:40.345917Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489379590800611:2244] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:40.346210Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489379590800052:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.346325Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489379590800055:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.346830Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489379590800578:2221][/Root] Path was updated to new version: owner# [1:7608489379590800359:2108], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.346867Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489379590800058:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.347559Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489379590800611:2244] Ack update: ack to# [1:7608489379590800440:2147], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:40.347742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-02-19T08:24:40.348187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:24:40.348252Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489379590800729:2292][/Root] Path was updated to new version: owner# [1:7608489379590800722:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.348509Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489379590800730:2293][/Root] Path was updated to new version: owner# [1:7608489379590800723:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { 
Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.189775Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489395965326533:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:43.189855Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:43.219281Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:43.315549Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:43.317118Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489395965326505:2081] 1771489483185789 != 1771489483185792 2026-02-19T08:24:43.327045Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:43.327133Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:43.353452Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:43.436210Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:43.436240Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:43.436250Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:43.436327Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:43.446048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:43.820593Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:43.820915Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:43.820964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:43.821625Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:43.823650Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 1771489483869, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:43.825227Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:43.825572Z node 2 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [2:7608489395965327040:2253] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:43.825977Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7608489395965326473:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:43.826078Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7608489395965326476:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:43.826120Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-02-19T08:24:43.826134Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7608489395965326479:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, ... 08:24:51.651644Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:51.651675Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:51.651684Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:51.651759Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:51.664739Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:52.117487Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:52.165361Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:52.215355Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:52.351273Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:52.416336Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:52.425971Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.169656Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608489449691445016:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.169784Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.170228Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608489449691445025:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.170268Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.533328Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.564332Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.597678Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.629534Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.658476Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.694073Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.732806Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.779636Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:55.859583Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608489449691445898:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.859694Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.861269Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608489449691445903:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.861339Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608489449691445904:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.861511Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:55.865294Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:24:55.878748Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608489449691445907:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:24:55.969120Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608489449691445958:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:24:56.406601Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608489432511573996:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:56.406691Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:24:57.654342Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-02-19T08:24:57.667108Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-02-19T08:24:57.749928Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608489458281380932:2547], status: GENERIC_ERROR, issues:
: Error: Table metadata loading, code: 1050
:2:17: Error: Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn't exist, please contact internal support 2026-02-19T08:24:57.750850Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=OTAxODlhZGMtYWIxN2IxMDEtYmUxNjA4YzMtYjFjMjBmMg==, ActorId: [4:7608489458281380930:2546], ActorState: ExecuteState, LegacyTraceId: 01khtg14jydj60ek06gprkxrg6, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 17 } message: "Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn\'t exist, please contact internal support" end_position { row: 2 column: 17 } severity: 1 } } tx_id# trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:22:01.865329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:02.028388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:22:02.054127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:22:02.054211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:22:02.054787Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:22:02.376582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:02.376737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:02.462210Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489318338191 != 1771489318338195 2026-02-19T08:22:02.472127Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:02.522128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:02.632122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:22:03.173918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:22:03.173983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:22:03.174020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:22:03.176705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:03.177552Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:03.196594Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-02-19T08:22:13.940455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:904:2769], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:13.940771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:13.946296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:931:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:13.946407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:13.950249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:14.424259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.424404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.425023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1028:2854], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.425130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.425224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1030:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:14.430534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:15.312558Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1033:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:22:15.382040Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1123:2922] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:16.337515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:16.831796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:17.612005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:18.649498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:19.142764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:22:19.740965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:20.782312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", 
AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-02-19T08:22:37.063621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA S ... 6c11f1-95dda57d-a77c27e9;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=79f6db10-d6c11f1-95dda57d-a77c27e9; 2026-02-19T08:24:57.911260Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=129656;raw_bytes=3677612;count=3;records=3048} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 2026-02-19T08:24:57.911571Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadBlobs;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.911873Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2026-02-19T08:24:57.912203Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.912355Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.912622Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.918289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
event=on_execution;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:2959:4328];task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;task_class=CS::GENERAL;fline=general_compaction.cpp:138;event=blobs_created_diff;appended=0;;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:192];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:192:232];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:424:256];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:680:192];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:872:264];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1520:192];;;;switched=(portion_id:20;path_id:1000000895;records_count:1;schema_version:2;level:0;cs:plan_step=1762851225500;tx_id=18446744073709551615;;wi:10;;column_size:1328;index_size:0;meta:(()););(portion_id:19;path_id:1000000895;records_count:1;schema_version:2;level:0;;column_size:1712;index_size:0;meta:(()););; 2026-02-19T08:24:57.918392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:2959:4328];task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2026-02-19T08:24:57.918467Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:2959:4328];task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.918650Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2026-02-19T08:24:57.918780Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:63;event=Limiter; 2026-02-19T08:24:57.918819Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=AskDiskQuota;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.918856Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Writing;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.919206Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 72075186224037893 2026-02-19T08:24:57.919328Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=70;sum=2726;count=57; 2026-02-19T08:24:57.919391Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:66;memory_size=190;data_size=182;sum=5510;count=58;size_of_meta=112; 2026-02-19T08:24:57.919440Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:44;memory_size=270;data_size=262;sum=7830;count=29;size_of_portion=192; 2026-02-19T08:24:57.919553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.919702Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[49] (CS::GENERAL) apply at tablet 72075186224037893 2026-02-19T08:24:57.920552Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:2959:4328];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=72075186224037893;external_task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; 2026-02-19T08:24:57.920602Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 72075186224037893 Save Batch GenStep: 1:17 Blob count: 1 2026-02-19T08:24:57.920706Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=129656;raw_bytes=3677612;count=3;records=3048} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=51;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=53;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=54;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=50;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=52;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=79f6db10-d6c11f1-95dda57d-a77c27e9; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=CLEANUP_PORTIONS;event=free;usage=560;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=GLOBAL;event=free;usage=6240;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=CLEANUP_PORTIONS;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=GLOBAL;event=free;usage=5680;delta=560; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=79f7280e-d6c11f1-9103be5e-d3fa68c9; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=COMPACTION;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=GLOBAL;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=COMPACTION;event=free;usage=560;delta=3040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=GLOBAL;event=free;usage=560;delta=3040; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=COMPACTION;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:92;name=GLOBAL;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 >> DstCreator::ColumnsSizeMismatch >> DstCreator::ReplicationModeMismatch >> GenericFederatedQuery::IcebergHiveTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown >> GenericFederatedQuery::IcebergHiveBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown >> DstCreator::EmptyReplicationConfig [GOOD] >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> DstCreator::GlobalConsistency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2026-02-19T08:24:53.070393Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489439464593693:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:53.071580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:53.269733Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:53.276426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:53.276543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:53.346388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:53.349629Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:53.490911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:53.596506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:53.596532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:53.596558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:53.596653Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:53.796741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:53.804597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:53.909129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:53.934139Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:53.934156Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:53.934578Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:54.074728Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:55.904783Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489493900, tx_id: 281474976710658 } } } 2026-02-19T08:24:55.905067Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:24:55.939934Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:24:55.942543Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 
SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489493970 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:24:55.942767Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:24:56.534034Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489451229314308:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:56.534097Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:56.557838Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:56.619983Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:56.621327Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489451229314279:2081] 1771489496532995 != 1771489496532998 2026-02-19T08:24:56.659439Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:56.659527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:56.665252Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:56.730844Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:56.891940Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:56.891967Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2026-02-19T08:24:56.891975Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:56.892075Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:57.135389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:57.141380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:24:57.144290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:57.175729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:57.206200Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:57.206233Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:57.206636Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:57.584217Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:59.648306Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489497211, tx_id: 281474976715658 } } } 2026-02-19T08:24:59.648565Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:24:59.649871Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 
72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:24:59.650808Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1771489497246 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:24:59.650972Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2026-02-19T08:24:53.196755Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489441690696899:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:53.196839Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:53.450116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:53.486372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:53.486511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:53.489865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:53.562268Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:53.563200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489441690696869:2081] 1771489493194394 != 1771489493194397 2026-02-19T08:24:53.731161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:53.734785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:53.734816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:53.734827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:53.734892Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:53.931813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:53.937953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:24:53.943479Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:53.943506Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:53.944073Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:54.205656Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:56.061818Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2026-02-19T08:24:56.061866Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2026-02-19T08:24:56.719683Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489453873661423:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:56.721053Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:56.728490Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:56.818623Z node 2 
:IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:56.821857Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489453873661377:2081] 1771489496716306 != 1771489496716309 2026-02-19T08:24:56.821919Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:56.856829Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:56.856888Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:56.859225Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:57.032712Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:57.046248Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:57.046270Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:57.046276Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:57.046336Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:57.274615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:57.283109Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:57.393291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:57.428028Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: 
(empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:57.428049Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:57.428497Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:57.726323Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:59.875376Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489497372, tx_id: 281474976710658 } } } 2026-02-19T08:24:59.875698Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:24:59.908260Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:24:59.911323Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489497456 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:24:59.911549Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 >> TxUsage::WriteToTopic_Demo_24_Table [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> 
DstCreator::CannotFindColumn [GOOD] >> DDisk::PersistentBufferWithRestarts [GOOD] >> PersistentBufferSpaceAllocator::AddNewChunkAndOccupyAllSpace [GOOD] >> TxUsage::WriteToTopic_Demo_14_Table [GOOD] >> TxUsage::WriteToTopic_Demo_24_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] Test command err: 2026-02-19T08:22:57.802222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:57.802297Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:58.083113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:59.443081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:22:59.766490Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:59.767540Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:59.782708Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7532901725336945374 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:59.913582Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:59.914408Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:59.914645Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8638663830601637176 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:59.919404Z node 2 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:22:59.977592Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:59.982391Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:59.982661Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15929963775911187756 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:23:00.067049Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:23:00.067650Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:23:00.067864Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmptmpm03/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10509830802185888882 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceI ... disk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9184010291684830376 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:24:54.914699Z node 146 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:24:54.915342Z node 146 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2026-02-19T08:24:54.915678Z node 146 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15051841567123370113 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:24:54.923077Z node 146 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:24:54.972907Z node 147 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:24:54.973448Z node 147 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:24:54.973754Z node 147 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmps7RRhb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2112021201261221749 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:24:55.484267Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:55.484375Z node 145 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:55.505114Z node 145 :STATISTICS WARN: tx_init.cpp:292: [72075186233409554] TTxInit::Complete. 
EnableColumnStatistics=false 2026-02-19T08:24:59.106168Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:24:59.106290Z node 154 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:59.240887Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:00.220362Z node 162 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:25:00.221034Z node 162 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmpSlW2RJ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:25:00.221429Z node 162 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmpSlW2RJ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/0036b3/r3tmp/tmpSlW2RJ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17628553608535879181 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 >> KqpBatchUpdate::Large_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_ddisk/unittest >> PersistentBufferSpaceAllocator::AddNewChunkAndOccupyAllSpace [GOOD] Test command err: RandomSeed# 14678756916385914464 next iteration write persistent buffer offset# 12288 size# 12288 lsn# 1 letter# . 
write persistent buffer offset# 12288 size# 4096 lsn# 2 letter# / read persistent buffer offset# 12288 size# 4096 lsn# 2 write persistent buffer offset# 12288 size# 4096 lsn# 3 letter# 0 read persistent buffer offset# 12288 size# 4096 lsn# 3 read persistent buffer offset# 12288 size# 4096 lsn# 3 read persistent buffer offset# 12288 size# 4096 lsn# 3 read persistent buffer offset# 12288 size# 4096 lsn# 3 write persistent buffer offset# 45056 size# 12288 lsn# 4 letter# 1 list read persistent buffer offset# 45056 size# 12288 lsn# 4 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 erase persistent buffer offset# 12288 size# 4096 lsn# 3 erase persistent buffer offset# 12288 size# 4096 lsn# 3 list read persistent buffer offset# 45056 size# 12288 lsn# 4 erase persistent buffer offset# 45056 size# 12288 lsn# 4 erase persistent buffer offset# 45056 size# 12288 lsn# 4 write persistent buffer offset# 20480 size# 8192 lsn# 5 letter# 2 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 20480 size# 4096 lsn# 6 letter# 3 list write persistent buffer offset# 32768 size# 4096 lsn# 7 letter# 4 write persistent buffer offset# 24576 size# 12288 lsn# 8 letter# 5 read persistent buffer offset# 32768 size# 4096 lsn# 7 read persistent buffer offset# 32768 size# 4096 lsn# 7 read persistent buffer offset# 24576 size# 12288 lsn# 8 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 45056 size# 4096 lsn# 9 letter# 6 write persistent buffer offset# 53248 size# 12288 lsn# 10 letter# 7 erase persistent buffer offset# 45056 size# 4096 lsn# 9 erase persistent buffer offset# 45056 size# 4096 lsn# 9 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list write persistent buffer offset# 57344 size# 8192 lsn# 11 letter# 8 list erase persistent buffer offset# 57344 size# 8192 lsn# 11 erase persistent buffer offset# 57344 size# 8192 lsn# 11 write persistent buffer offset# 4096 size# 4096 lsn# 12 letter# 9 write persistent buffer offset# 4096 size# 8192 lsn# 13 letter# A read persistent buffer offset# 4096 size# 8192 lsn# 13 list read persistent buffer offset# 4096 size# 4096 lsn# 12 list write persistent buffer offset# 8192 size# 8192 lsn# 14 letter# B erase persistent buffer offset# 8192 size# 8192 lsn# 14 erase persistent buffer offset# 8192 size# 8192 lsn# 14 erase persistent buffer offset# 4096 size# 8192 lsn# 13 erase persistent buffer offset# 4096 size# 8192 lsn# 13 list list list batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list write persistent buffer offset# 16384 size# 4096 lsn# 15 letter# C list read persistent buffer offset# 16384 size# 4096 lsn# 15 write persistent buffer offset# 45056 size# 4096 lsn# 16 letter# D list batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 list list list list write persistent buffer offset# 49152 size# 4096 lsn# 17 letter# E write persistent buffer offset# 0 size# 8192 lsn# 18 letter# F batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 28672 size# 12288 lsn# 19 letter# G erase persistent buffer offset# 28672 size# 12288 lsn# 19 erase persistent buffer offset# 28672 size# 12288 lsn# 19 list write persistent buffer offset# 0 size# 12288 lsn# 20 letter# H erase persistent buffer offset# 0 size# 12288 lsn# 20 erase 
persistent buffer offset# 0 size# 12288 lsn# 20 write persistent buffer offset# 36864 size# 12288 lsn# 21 letter# I batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 write persistent buffer offset# 32768 size# 12288 lsn# 22 letter# J batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list list write persistent buffer offset# 28672 size# 12288 lsn# 23 letter# K list erase persistent buffer offset# 28672 size# 12288 lsn# 23 erase persistent buffer offset# 28672 size# 12288 lsn# 23 list list write persistent buffer offset# 0 size# 4096 lsn# 24 letter# L erase persistent buffer offset# 0 size# 4096 lsn# 24 erase persistent buffer offset# 0 size# 4096 lsn# 24 write persistent buffer offset# 0 size# 4096 lsn# 25 letter# M list write persistent buffer offset# 49152 size# 4096 lsn# 26 letter# N erase persistent buffer offset# 0 size# 4096 lsn# 25 erase persistent buffer offset# 0 size# 4096 lsn# 25 list erase persistent buffer offset# 49152 size# 4096 lsn# 26 erase persistent buffer offset# 49152 size# 4096 lsn# 26 write persistent buffer offset# 20480 size# 12288 lsn# 27 letter# O erase persistent buffer offset# 20480 size# 12288 lsn# 27 erase persistent buffer offset# 20480 size# 12288 lsn# 27 write persistent buffer offset# 36864 size# 4096 lsn# 28 letter# P erase persistent buffer offset# 36864 size# 4096 lsn# 28 erase persistent buffer offset# 36864 size# 4096 lsn# 28 list write persistent buffer offset# 16384 size# 8192 lsn# 29 letter# Q list read persistent buffer offset# 16384 size# 8192 lsn# 29 write persistent buffer offset# 28672 size# 4096 lsn# 30 letter# R batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 40960 size# 4096 lsn# 31 letter# S batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list list write persistent buffer offset# 36864 size# 4096 lsn# 32 letter# T write persistent buffer offset# 45056 size# 8192 lsn# 33 letter# U list write persistent buffer offset# 49152 size# 4096 lsn# 34 letter# V write persistent buffer offset# 32768 size# 4096 lsn# 35 letter# W list list write persistent buffer offset# 0 size# 12288 lsn# 36 letter# X write persistent buffer offset# 49152 size# 4096 lsn# 37 letter# Y read persistent buffer offset# 36864 size# 4096 lsn# 32 list write persistent buffer offset# 16384 size# 4096 lsn# 38 letter# Z batch erase persistent buffer count# 3 batch erase persistent buffer count# 3 read persistent buffer offset# 16384 size# 4096 lsn# 38 erase persistent buffer offset# 45056 size# 8192 lsn# 33 erase persistent buffer offset# 45056 size# 8192 lsn# 33 erase persistent buffer offset# 16384 size# 4096 lsn# 38 erase persistent buffer offset# 16384 size# 4096 lsn# 38 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 45056 size# 8192 lsn# 39 letter# a list write persistent buffer offset# 40960 size# 4096 lsn# 40 letter# b write persistent buffer offset# 28672 size# 12288 lsn# 41 letter# c read persistent buffer offset# 40960 size# 4096 lsn# 40 read persistent buffer offset# 40960 size# 4096 lsn# 40 list write persistent buffer offset# 53248 size# 4096 lsn# 42 letter# d write persistent buffer offset# 36864 size# 8192 lsn# 43 letter# e read persistent buffer offset# 45056 size# 8192 lsn# 39 list list list list read persistent buffer offset# 53248 size# 4096 lsn# 42 write persistent buffer offset# 4096 size# 8192 lsn# 44 letter# f write persistent buffer 
offset# 53248 size# 12288 lsn# 45 letter# g read persistent buffer offset# 53248 size# 12288 lsn# 45 write persistent buffer offset# 49152 size# 4096 lsn# 46 letter# h list erase persistent buffer offset# 53248 size# 4096 lsn# 42 erase persistent buffer offset# 53248 size# 4096 lsn# 42 read persistent buffer offset# 40960 size# 4096 lsn# 40 batch erase persistent buffer count# 3 batch erase persistent buffer count# 3 list list read persistent buffer offset# 4096 size# 8192 lsn# 44 read persistent buffer offset# 4096 size# 8192 lsn# 44 read persistent buffer offset# 45056 size# 8192 lsn# 39 write persistent buffer offset# 53248 size# 12288 lsn# 47 letter# i write persistent buffer offset# 12288 size# 8192 lsn# 48 letter# j erase persistent buffer offset# 4096 size# 8192 lsn# 44 erase persistent buffer offset# 4096 size# 8192 lsn# 44 erase persistent buffer offset# 45056 size# 8192 lsn# 39 erase persistent buffer offset# 45056 size# 8192 lsn# 39 list read persistent buffer offset# 53248 size# 12288 lsn# 47 erase persistent buffer offset# 53248 size# 12288 lsn# 47 erase persistent buffer offset# 53248 size# 12288 lsn# 47 read persistent buffer offset# 28672 size# 12288 lsn# 41 erase persistent buffer offset# 12288 size# 8192 lsn# 48 erase persistent buffer offset# 12288 size# 8192 lsn# 48 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 list list write persistent buffer offset# 57344 size# 4096 lsn# 49 letter# k erase persistent buffer offset# 57344 size# 4096 lsn# 49 erase persistent buffer offset# 57344 size# 4096 lsn# 49 list write persistent buffer offset# 12288 size# 8192 lsn# 50 letter# l write persistent buffer offset# 8192 size# 4096 lsn# 51 letter# m write persistent buffer offset# 36864 size# 12288 lsn# 52 letter# n write persistent buffer offset# 32768 size# 8192 lsn# 53 letter# o erase persistent buffer offset# 36864 size# 12288 lsn# 52 erase persistent buffer offset# 36864 size# 12288 lsn# 52 erase persistent buffer offset# 12288 size# 8192 lsn# 50 erase persistent buffer offset# 12288 size# 8192 lsn# 50 list batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 36864 size# 4096 lsn# 54 letter# p write persistent buffer offset# 57344 size# 8192 lsn# 55 letter# q erase persistent buffer offset# 57344 size# 8192 lsn# 55 erase persistent buffer offset# 57344 size# 8192 lsn# 55 erase persistent buffer offset# 36864 size# 4096 lsn# 54 erase persistent buffer offset# 36864 size# 4096 lsn# 54 write persistent buffer offset# 12288 size# 4096 lsn# 56 letter# r write persistent buffer offset# 57344 size# 4096 lsn# 57 letter# s list erase persistent buffer offset# 12288 size# 4096 lsn# 56 erase persistent buffer offset# 12288 size# 4096 lsn# 56 list erase persistent buffer offset# 57344 size# 4096 lsn# 57 erase persistent buffer offset# 57344 size# 4096 lsn# 57 list write persistent buffer offset# 61440 size# 4096 lsn# 58 letter# t erase persistent buffer offset# 61440 size# 4096 lsn# 58 erase persistent buffer offset# 61440 size# 4096 lsn# 58 list list write persistent buffer offset# 49152 size# 4096 lsn# 59 letter# u list write persistent buffer offset# 49152 size# 4096 lsn# 60 letter# v erase persistent buffer offset# 49152 size# 4096 lsn# 60 erase persistent buffer offset# 49152 size# 4096 lsn# 60 write persistent buffer offset# 61440 size# 4096 lsn# 61 letter# w erase persistent buffer offset# 61440 size# 4096 lsn# 61 erase persistent buffer offset# 61440 size# 4096 lsn# 61 erase persistent buffer 
offset# 49152 size# 4096 lsn# 59 erase persistent buffer offset# 49152 size# 4096 lsn# 59 list list list write persistent buffer offset# 4096 size# 4096 lsn# 62 letter# x write persistent buffer offset# 36864 size# 4096 lsn# 63 letter# y erase persistent buffer offset# 36864 size# 4096 lsn# 63 erase persistent buffer offset# 36864 size# 4096 lsn# 63 list read persistent buffer offset# 4096 size# 4096 lsn# 62 read persistent buffer offset# 4096 size# 4096 lsn# 62 write persistent buffer offset# 0 size# 4096 lsn# 64 letter# z write persistent buffer offset# 45056 size# 8192 lsn# 65 letter# . list batch erase persistent buffer count# 3 batch erase persistent buffer count# 3 list list list write persistent buffer offset# 32768 size# 12288 lsn# 66 letter# / write persistent buffer offset# 8192 size# 4096 lsn# 67 letter# 0 write persistent buffer offset# 4096 size# 12288 lsn# 68 letter# 1 list read persistent buffer offset# 4096 size# 12288 lsn# 68 read persistent buffer offset# 8192 size# 4096 lsn# 67 batch erase persistent buffer count# 3 batch erase persistent buffer count# 3 list list list write persistent buffer offset# 57344 size# 4096 lsn# 69 letter# 2 erase persistent buffer offset# 57344 size# 4096 lsn# 69 eras ... tent buffer offset# 53248 size# 12288 lsn# 7941 letter# 2 list read persistent buffer offset# 53248 size# 12288 lsn# 7941 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 40960 size# 4096 lsn# 7942 letter# 3 read persistent buffer offset# 40960 size# 4096 lsn# 7942 write persistent buffer offset# 53248 size# 12288 lsn# 7943 letter# 4 write persistent buffer offset# 49152 size# 8192 lsn# 7944 letter# 5 erase persistent buffer offset# 4096 size# 8192 lsn# 7939 erase persistent buffer offset# 4096 size# 8192 lsn# 7939 write persistent buffer offset# 61440 size# 4096 lsn# 7945 letter# 6 read persistent buffer offset# 49152 size# 8192 lsn# 7944 write persistent buffer offset# 16384 size# 4096 lsn# 7946 letter# 7 read persistent buffer offset# 49152 size# 8192 lsn# 7944 erase persistent buffer offset# 16384 size# 4096 lsn# 7946 erase persistent buffer offset# 16384 size# 4096 lsn# 7946 batch erase persistent buffer count# 4 batch erase persistent buffer count# 4 write persistent buffer offset# 49152 size# 4096 lsn# 7947 letter# 8 list list write persistent buffer offset# 20480 size# 4096 lsn# 7948 letter# 9 list list read persistent buffer offset# 49152 size# 4096 lsn# 7947 batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 49152 size# 4096 lsn# 7949 letter# A erase persistent buffer offset# 49152 size# 4096 lsn# 7949 erase persistent buffer offset# 49152 size# 4096 lsn# 7949 list list write persistent buffer offset# 28672 size# 8192 lsn# 7950 letter# B erase persistent buffer offset# 28672 size# 8192 lsn# 7950 erase persistent buffer offset# 28672 size# 8192 lsn# 7950 write persistent buffer offset# 28672 size# 8192 lsn# 7951 letter# C read persistent buffer offset# 28672 size# 8192 lsn# 7951 list list batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list list write persistent buffer offset# 28672 size# 8192 lsn# 7952 letter# D read persistent buffer offset# 28672 size# 8192 lsn# 7952 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list write persistent buffer offset# 49152 size# 12288 lsn# 7953 letter# E list erase persistent buffer offset# 49152 size# 12288 lsn# 7953 erase persistent buffer 
offset# 49152 size# 12288 lsn# 7953 list list list write persistent buffer offset# 36864 size# 8192 lsn# 7954 letter# F read persistent buffer offset# 36864 size# 8192 lsn# 7954 read persistent buffer offset# 36864 size# 8192 lsn# 7954 write persistent buffer offset# 4096 size# 8192 lsn# 7955 letter# G batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 57344 size# 8192 lsn# 7956 letter# H read persistent buffer offset# 57344 size# 8192 lsn# 7956 write persistent buffer offset# 45056 size# 8192 lsn# 7957 letter# I erase persistent buffer offset# 57344 size# 8192 lsn# 7956 erase persistent buffer offset# 57344 size# 8192 lsn# 7956 erase persistent buffer offset# 45056 size# 8192 lsn# 7957 erase persistent buffer offset# 45056 size# 8192 lsn# 7957 write persistent buffer offset# 8192 size# 12288 lsn# 7958 letter# J erase persistent buffer offset# 8192 size# 12288 lsn# 7958 erase persistent buffer offset# 8192 size# 12288 lsn# 7958 write persistent buffer offset# 28672 size# 4096 lsn# 7959 letter# K write persistent buffer offset# 45056 size# 12288 lsn# 7960 letter# L write persistent buffer offset# 36864 size# 4096 lsn# 7961 letter# M write persistent buffer offset# 49152 size# 12288 lsn# 7962 letter# N write persistent buffer offset# 16384 size# 8192 lsn# 7963 letter# O batch erase persistent buffer count# 3 batch erase persistent buffer count# 3 list read persistent buffer offset# 16384 size# 8192 lsn# 7963 list erase persistent buffer offset# 16384 size# 8192 lsn# 7963 erase persistent buffer offset# 16384 size# 8192 lsn# 7963 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list list list write persistent buffer offset# 61440 size# 4096 lsn# 7964 letter# P read persistent buffer offset# 61440 size# 4096 lsn# 7964 erase persistent buffer offset# 61440 size# 4096 lsn# 7964 erase persistent buffer offset# 61440 size# 4096 lsn# 7964 list list Restarting node 2 2026-02-19T08:24:57.560534Z 1 01h19m30.044304s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2026-02-19T08:25:00.414934Z 2 01h19m30.044816s :BS_DDISK ERROR: {BSDD11@ddisk_actor_persistent_buffer.cpp:85} TDDiskActor::StartRestorePersistentBuffer duplicated lsn for tablet in persistent buffer TabletId# 1 VChunkIndex# 31 Lsn# 6385 2026-02-19T08:25:00.602597Z 2 01h19m30.044816s :BS_DDISK ERROR: {BSDD11@ddisk_actor_persistent_buffer.cpp:85} TDDiskActor::StartRestorePersistentBuffer duplicated lsn for tablet in persistent buffer TabletId# 1 VChunkIndex# 31 Lsn# 6385 2026-02-19T08:25:00.907731Z 2 01h19m30.044816s :BS_DDISK ERROR: {BSDD11@ddisk_actor_persistent_buffer.cpp:85} TDDiskActor::StartRestorePersistentBuffer duplicated lsn for tablet in persistent buffer TabletId# 1 VChunkIndex# 24 Lsn# 4945 list list write persistent buffer offset# 45056 size# 8192 lsn# 7965 letter# Q read persistent buffer offset# 45056 size# 8192 lsn# 7965 write persistent buffer offset# 49152 size# 12288 lsn# 7966 letter# R batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 36864 size# 4096 lsn# 7967 letter# S list write persistent buffer offset# 32768 size# 12288 lsn# 7968 letter# T batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 list list list write persistent buffer offset# 49152 size# 4096 lsn# 7969 letter# U read persistent buffer offset# 49152 size# 4096 lsn# 7969 read persistent buffer offset# 49152 size# 4096 lsn# 7969 read persistent buffer offset# 49152 
size# 4096 lsn# 7969 read persistent buffer offset# 49152 size# 4096 lsn# 7969 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 write persistent buffer offset# 32768 size# 12288 lsn# 7970 letter# V batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 write persistent buffer offset# 0 size# 4096 lsn# 7971 letter# W erase persistent buffer offset# 0 size# 4096 lsn# 7971 erase persistent buffer offset# 0 size# 4096 lsn# 7971 write persistent buffer offset# 16384 size# 8192 lsn# 7972 letter# X erase persistent buffer offset# 16384 size# 8192 lsn# 7972 erase persistent buffer offset# 16384 size# 8192 lsn# 7972 list list write persistent buffer offset# 4096 size# 12288 lsn# 7973 letter# Y erase persistent buffer offset# 4096 size# 12288 lsn# 7973 erase persistent buffer offset# 4096 size# 12288 lsn# 7973 write persistent buffer offset# 36864 size# 4096 lsn# 7974 letter# Z batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list write persistent buffer offset# 20480 size# 12288 lsn# 7975 letter# a batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list write persistent buffer offset# 40960 size# 12288 lsn# 7976 letter# b erase persistent buffer offset# 40960 size# 12288 lsn# 7976 erase persistent buffer offset# 40960 size# 12288 lsn# 7976 list write persistent buffer offset# 12288 size# 8192 lsn# 7977 letter# c write persistent buffer offset# 20480 size# 4096 lsn# 7978 letter# d batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 write persistent buffer offset# 12288 size# 12288 lsn# 7979 letter# e list list batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 write persistent buffer offset# 24576 size# 12288 lsn# 7980 letter# f batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 write persistent buffer offset# 36864 size# 8192 lsn# 7981 letter# g list read persistent buffer offset# 36864 size# 8192 lsn# 7981 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list write persistent buffer offset# 16384 size# 4096 lsn# 7982 letter# h read persistent buffer offset# 16384 size# 4096 lsn# 7982 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list list write persistent buffer offset# 12288 size# 8192 lsn# 7983 letter# i read persistent buffer offset# 12288 size# 8192 lsn# 7983 write persistent buffer offset# 28672 size# 4096 lsn# 7984 letter# j read persistent buffer offset# 12288 size# 8192 lsn# 7983 read persistent buffer offset# 12288 size# 8192 lsn# 7983 write persistent buffer offset# 61440 size# 4096 lsn# 7985 letter# k list list batch erase persistent buffer count# 2 batch erase persistent buffer count# 2 read persistent buffer offset# 12288 size# 8192 lsn# 7983 erase persistent buffer offset# 12288 size# 8192 lsn# 7983 erase persistent buffer offset# 12288 size# 8192 lsn# 7983 write persistent buffer offset# 40960 size# 8192 lsn# 7986 letter# l erase persistent buffer offset# 40960 size# 8192 lsn# 7986 erase persistent buffer offset# 40960 size# 8192 lsn# 7986 list list write persistent buffer offset# 61440 size# 4096 lsn# 7987 letter# m read persistent buffer offset# 61440 size# 4096 lsn# 7987 batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 write persistent buffer offset# 8192 size# 12288 lsn# 7988 letter# n list read persistent buffer offset# 8192 size# 12288 lsn# 7988 erase persistent buffer offset# 8192 size# 12288 
lsn# 7988 erase persistent buffer offset# 8192 size# 12288 lsn# 7988 list write persistent buffer offset# 4096 size# 8192 lsn# 7989 letter# o list write persistent buffer offset# 24576 size# 12288 lsn# 7990 letter# p write persistent buffer offset# 8192 size# 12288 lsn# 7991 letter# q erase persistent buffer offset# 24576 size# 12288 lsn# 7990 erase persistent buffer offset# 24576 size# 12288 lsn# 7990 read persistent buffer offset# 8192 size# 12288 lsn# 7991 list write persistent buffer offset# 20480 size# 4096 lsn# 7992 letter# r read persistent buffer offset# 20480 size# 4096 lsn# 7992 erase persistent buffer offset# 4096 size# 8192 lsn# 7989 erase persistent buffer offset# 4096 size# 8192 lsn# 7989 write persistent buffer offset# 45056 size# 8192 lsn# 7993 letter# s write persistent buffer offset# 45056 size# 8192 lsn# 7994 letter# t erase persistent buffer offset# 20480 size# 4096 lsn# 7992 erase persistent buffer offset# 20480 size# 4096 lsn# 7992 read persistent buffer offset# 45056 size# 8192 lsn# 7993 batch erase persistent buffer count# 3 batch erase persistent buffer count# 3 write persistent buffer offset# 32768 size# 12288 lsn# 7995 letter# u list write persistent buffer offset# 32768 size# 12288 lsn# 7996 letter# v erase persistent buffer offset# 32768 size# 12288 lsn# 7995 erase persistent buffer offset# 32768 size# 12288 lsn# 7995 read persistent buffer offset# 32768 size# 12288 lsn# 7996 erase persistent buffer offset# 32768 size# 12288 lsn# 7996 erase persistent buffer offset# 32768 size# 12288 lsn# 7996 list list list list write persistent buffer offset# 45056 size# 4096 lsn# 7997 letter# w list read persistent buffer offset# 45056 size# 4096 lsn# 7997 erase persistent buffer offset# 45056 size# 4096 lsn# 7997 erase persistent buffer offset# 45056 size# 4096 lsn# 7997 list write persistent buffer offset# 49152 size# 4096 lsn# 7998 letter# x batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 list list write persistent buffer offset# 16384 size# 4096 lsn# 7999 letter# y batch erase persistent buffer count# 1 batch erase persistent buffer count# 1 write persistent buffer offset# 20480 size# 12288 lsn# 8000 letter# z write persistent buffer offset# 20480 size# 12288 lsn# 8001 letter# . 
erase persistent buffer offset# 20480 size# 12288 lsn# 8000 erase persistent buffer offset# 20480 size# 12288 lsn# 8000 erase persistent buffer offset# 20480 size# 12288 lsn# 8001 erase persistent buffer offset# 20480 size# 12288 lsn# 8001 list write persistent buffer offset# 4096 size# 8192 lsn# 8002 letter# / >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2026-02-19T08:24:55.227433Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489447346064700:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:55.228551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:55.445835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:55.467145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:55.467294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:55.535154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:55.545565Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:55.564200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489447346064648:2081] 1771489495215652 != 1771489495215655 2026-02-19T08:24:55.611677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:55.810473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:55.810499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:55.810510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:55.810617Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:56.042954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:56.048232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:24:56.051861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:56.144207Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:56.144238Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:56.144828Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:56.229526Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:58.075746Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489496140, tx_id: 281474976710658 } } } 2026-02-19T08:24:58.076059Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:24:58.077609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:58.078420Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-02-19T08:24:58.078458Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-02-19T08:24:58.108163Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-02-19T08:24:58.108199Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:24:58.924178Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489461284173350:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:58.927005Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:58.939797Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:59.000262Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489461284173304:2081] 1771489498921856 != 1771489498921859 2026-02-19T08:24:59.014875Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:59.045997Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:59.046076Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:59.049804Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:59.201797Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:59.231911Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:59.231942Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:59.231949Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:59.232026Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:59.519098Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:59.529928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:24:59.535583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:59.576613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:59.610497Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:59.610517Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:59.610987Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:59.931399Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:02.102014Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489499612, tx_id: 281474976710658 } } } 2026-02-19T08:25:02.102364Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:02.143994Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:25:02.148161Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489499647 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:25:02.148485Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] >> 
TxUsage::WriteToTopic_Demo_14_Query >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer |93.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_ddisk/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_ddisk/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_ddisk/test-results/unittest/{meta.json ... results_accumulator.log} >> DstCreator::SamePartitionCount [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:02.877331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:02.877445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:02.877544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:02.877587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:02.877627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:02.877676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:02.877756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:02.877804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:02.877882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:02.877962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:02.878959Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:02.879305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:02.961524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:02.961574Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:02.972408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:02.973214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:02.973439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:02.986652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:02.987581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:02.988452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:02.988846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:02.992862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:02.993104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:02.994529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:02.994612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:02.994777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:02.994830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:02.994879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:02.995088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:03.152120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.153290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.153427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.153515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.153654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.153757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.153829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.153918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.154033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.154112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.154179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.154245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.154324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.154418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:25:04.135990Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-02-19T08:25:04.136149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:04.136404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 Forgetting tablet 72075186233409551 2026-02-19T08:25:04.138535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-02-19T08:25:04.138833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:25:04.139589Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-02-19T08:25:04.142099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:04.142346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:25:04.142705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:25:04.142914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:25:04.143450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:25:04.143679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:04.143742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:04.143844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:25:04.147627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:25:04.147684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-02-19T08:25:04.147787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:7 2026-02-19T08:25:04.147876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:04.147992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:04.148037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:04.148129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:04.148447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:04.148481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:04.149098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:04.149133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:04.151214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:25:04.151287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-02-19T08:25:04.151364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:04.151386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:04.151446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:04.151508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:25:04.151618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:04.153382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 103 
2026-02-19T08:25:04.153652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:25:04.153746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:25:04.154242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:25:04.154348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:25:04.154387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1143:3050] TestWaitNotification: OK eventTxId 103 2026-02-19T08:25:04.154988Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:04.155212Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusPathDoesNotExist 2026-02-19T08:25:04.155372Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:04.155881Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:04.156067Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 214us result status StatusSuccess 2026-02-19T08:25:04.156544Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DstCreator::WithAsyncIndex [GOOD] >> DstCreator::GlobalConsistency [GOOD] >> DstCreator::KeyColumnNameMismatch >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::LS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_1 [GOOD] Test command err: 2026-02-19T08:24:29.583709Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489338851322638:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:29.584025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:29.809670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:29.841330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:29.841418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:29.884541Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:29.886677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:29.886943Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489338851322525:2081] 1771489469572659 != 1771489469572662 2026-02-19T08:24:29.973617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:29.973638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:24:29.973645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:29.973729Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:30.007377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:32.730015Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006bb/r3tmp/spilling-tmp-runner/node_1_e2f25e99-5901cf7c-78c0b1f8-3d66d3a1, actor: [1:7608489351736226251:2385] 2026-02-19T08:24:32.730132Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006bb/r3tmp/spilling-tmp-runner 2026-02-19T08:24:34.913541Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913604Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913636Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913668Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913693Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913750Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913790Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913825Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913866Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913927Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.913977Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:34.914035Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0cycegztsnp6dkfv9znb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmJjNDQzMi1mZjYwMmQzNC00M2VkNGUxNS1kNmUyNjU1ZQ==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:34.918518Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489360326161929:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg0cycegztsnp6dkfv9znb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmJjNDQzMi1mZjYwMmQzNC00M2VkNGUxNS1kNmUyNjU1ZQ==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:34.918564Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489360326161929:2402] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg0cycegztsnp6dkfv9znb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmJjNDQzMi1mZjYwMmQzNC00M2VkNGUxNS1kNmUyNjU1ZQ==, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:34.922260Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:34.922433Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.922498Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.922555Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.922604Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.922690Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.922763Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.922842Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.922907Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.922974Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:34.923028Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [3:7608489477591018910:2669], CA [3:7608489477591018911:2670], CA [3:7608489477591018908:2667], CA [3:7608489477591018912:2671], CA [3:7608489477591018909:2668], trace_id# 2026-02-19T08:25:02.466751Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:7608489477591018908:2667] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1768 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 1045 FinishTimeMs: 1771489502465 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 100 ReadBytes: 800 AffectedPartitions: 2 } IngressRows: 100 ComputeCpuTimeUs: 241 BuildCpuTimeUs: 804 WaitOutputTimeUs: 1216 HostName: "ghrun-fkrokuvqke" NodeId: 3 StartTimeMs: 1771489502464 CreateTimeMs: 1771489502459 UpdateTimeMs: 1771489502465 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:02.466831Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [3:7608489477591018908:2667] 2026-02-19T08:25:02.466886Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [3:7608489477591018910:2669], CA [3:7608489477591018911:2670], CA [3:7608489477591018912:2671], CA [3:7608489477591018909:2668], trace_id# 2026-02-19T08:25:02.466947Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [3:7608489477591018910:2669], CA [3:7608489477591018911:2670], CA [3:7608489477591018912:2671], CA [3:7608489477591018909:2668], trace_id# 2026-02-19T08:25:02.467192Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. 
Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:7608489477591018909:2668] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1852 DurationUs: 1000 Tasks { TaskId: 2 CpuTimeUs: 1101 FinishTimeMs: 1771489502466 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 200 ReadBytes: 1600 AffectedPartitions: 2 } IngressRows: 200 ComputeCpuTimeUs: 396 BuildCpuTimeUs: 705 WaitOutputTimeUs: 1312 HostName: "ghrun-fkrokuvqke" NodeId: 3 StartTimeMs: 1771489502465 CreateTimeMs: 1771489502459 UpdateTimeMs: 1771489502466 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:02.467232Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [3:7608489477591018909:2668] 2026-02-19T08:25:02.467276Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [3:7608489477591018910:2669], CA [3:7608489477591018911:2670], CA [3:7608489477591018912:2671], trace_id# 2026-02-19T08:25:02.467313Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [3:7608489477591018910:2669], CA [3:7608489477591018911:2670], CA [3:7608489477591018912:2671], trace_id# 2026-02-19T08:25:02.467435Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:7608489477591018910:2669] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1039 DurationUs: 1000 Tasks { TaskId: 3 CpuTimeUs: 420 FinishTimeMs: 1771489502466 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 200 ReadBytes: 1600 AffectedPartitions: 2 } IngressRows: 200 ComputeCpuTimeUs: 351 BuildCpuTimeUs: 69 WaitOutputTimeUs: 1024 HostName: "ghrun-fkrokuvqke" NodeId: 3 StartTimeMs: 1771489502465 CreateTimeMs: 1771489502460 UpdateTimeMs: 1771489502466 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:02.467464Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [3:7608489477591018910:2669] 2026-02-19T08:25:02.467494Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [3:7608489477591018911:2670], CA [3:7608489477591018912:2671], trace_id# 2026-02-19T08:25:02.467531Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [3:7608489477591018911:2670], CA [3:7608489477591018912:2671], trace_id# 2026-02-19T08:25:02.467836Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:7608489477591018911:2670] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1113 Tasks { TaskId: 4 CpuTimeUs: 468 FinishTimeMs: 1771489502466 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 200 ReadBytes: 1600 AffectedPartitions: 2 } IngressRows: 200 ComputeCpuTimeUs: 394 BuildCpuTimeUs: 74 WaitOutputTimeUs: 786 HostName: "ghrun-fkrokuvqke" NodeId: 3 StartTimeMs: 1771489502466 CreateTimeMs: 1771489502460 UpdateTimeMs: 1771489502466 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:02.467896Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [3:7608489477591018911:2670] 2026-02-19T08:25:02.467942Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [3:7608489477591018912:2671], trace_id# 2026-02-19T08:25:02.467982Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:7608489477591018912:2671], trace_id# 2026-02-19T08:25:02.468100Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:7608489477591018912:2671] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 4353 DurationUs: 1000 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 297 FinishTimeMs: 1771489502467 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 197 BuildCpuTimeUs: 100 HostName: "ghrun-fkrokuvqke" NodeId: 3 StartTimeMs: 1771489502466 CreateTimeMs: 1771489502461 UpdateTimeMs: 1771489502467 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:02.468129Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [3:7608489477591018912:2671] 2026-02-19T08:25:02.468239Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:25:02.468276Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:25:02.468312Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [3:7608489477591018901:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg18s6a8gasg86e12ezbyw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2IxYmQ5ZjUtOGVlZjMzMTUtYzAwNzJmNTgtYWIwYzBmZTQ=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.010125s ReadRows: 700 ReadBytes: 5600 RequestUnits# 700 ForceFlag# true trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2026-02-19T08:24:57.093331Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489459052036409:2169];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:57.093691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:57.412853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:57.412986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:57.458365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:57.475958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:57.480236Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:57.481644Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489459052036274:2081] 1771489497083292 != 1771489497083295 2026-02-19T08:24:57.645783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:57.700776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:57.700796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:57.700803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:57.700887Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:57.872237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:57.878614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:24:57.883297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:58.005434Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:58.005460Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:58.006042Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:58.101785Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:00.044463Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489497974, tx_id: 281474976710659 } } } 2026-02-19T08:25:00.044945Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:00.046662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:00.047625Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2026-02-19T08:25:00.047648Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2026-02-19T08:25:00.071641Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2026-02-19T08:25:00.071660Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:25:00.826739Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489470820918015:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:25:00.830295Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:25:00.843411Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:00.933551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:00.933633Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:00.933779Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:25:00.938092Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:00.941914Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489470820917973:2081] 1771489500824337 != 1771489500824340 2026-02-19T08:25:00.973143Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:01.182252Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:25:01.198270Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:01.198290Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:01.198297Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:01.198368Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:01.511278Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:01.529620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:25:01.535043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:01.614814Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:25:01.614844Z node 2 
:REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:25:01.615556Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:25:01.831061Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:04.135290Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489501635, tx_id: 281474976710658 } } } 2026-02-19T08:25:04.135608Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:04.136954Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:04.138260Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-02-19T08:25:04.138283Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-02-19T08:25:04.167862Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-02-19T08:25:04.167888Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] >> TSchemeShardSubDomainTest::SchemeLimitsRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2026-02-19T08:24:57.587453Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489457545759089:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:57.588532Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:57.808283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:57.815348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:57.815443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:57.860358Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:57.862424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:57.862684Z node 1 :CONFIGS_DISPATCHER 
ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489457545759053:2081] 1771489497585888 != 1771489497585891 2026-02-19T08:24:58.064591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:58.084447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:58.084467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:58.084473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:58.084563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:58.311490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:58.324142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:58.441386Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:24:58.441409Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:24:58.441898Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:24:58.594954Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:00.490585Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489498422, tx_id: 281474976710658 } } } 2026-02-19T08:25:00.490957Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:00.493178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:00.494536Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-02-19T08:25:00.494593Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-02-19T08:25:00.524723Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-02-19T08:25:00.524751Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 40] 2026-02-19T08:25:01.369192Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489475359423450:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:25:01.369238Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:25:01.401818Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:25:01.484435Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:01.489937Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489475359423408:2081] 1771489501366158 != 1771489501366161 2026-02-19T08:25:01.513165Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:01.513243Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:01.523789Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:01.694800Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:25:01.725135Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:01.725161Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:01.725170Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:01.725257Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:02.045507Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
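The [DstCreator][rid 1][tid 1] trace entries above walk through the whole destination-table creation flow: TEvNavigateKeySetResult, table profiles, TEvDescribeTableResponse, TEvAllocateTxIdResult, TEvModifySchemeTransactionResult (StatusAccepted), Subscribe tx, TEvNotifyTxCompletionResult, and finally Success with the dstPathId. A small, hypothetical filter (not a ya or ydb tool) for pulling just those transitions out of a raw test log, assuming one log entry per line as in ya_log.txt, might look like this:

import re
import sys

# Matches entries such as:
#   2026-02-19T08:25:00.044463Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle ...
ENTRY = re.compile(r"^(\S+) node (\d+) .*?\[DstCreator\]\[rid (\d+)\]\[tid (\d+)\] (.+)$")

for line in sys.stdin:
    m = ENTRY.match(line)
    if m:
        ts, node, rid, tid, msg = m.groups()
        # Trim the protobuf payload (everything from the first " { ") so only the step remains.
        step = msg.split(" { ")[0]
        print(f"{ts} node={node} rid={rid} tid={tid} {step}")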
2026-02-19T08:25:02.055945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:02.370164Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:25:02.370190Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:25:02.370616Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:25:02.385278Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:04.538051Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489502356, tx_id: 281474976715658 } } } 2026-02-19T08:25:04.538416Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:04.539832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:04.540535Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-02-19T08:25:04.540555Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-02-19T08:25:04.577235Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-02-19T08:25:04.577283Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 41] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 
is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:04.452218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:04.452295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:04.452380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:04.452416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:04.452452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:04.452489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:04.452518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:04.452561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:04.452627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:04.452713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:04.453532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:04.453844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:04.537331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:04.537400Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:04.549172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:04.550045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:04.550234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:04.565322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:04.566063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:04.566845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured 
yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:04.567160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:04.570437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:04.570631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:04.571820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:04.571906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:04.572033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:04.572077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:04.572128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:04.572290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:04.705623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.706625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.706769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.706843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.706961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.707874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... d: 72075186233409549 CpuTimeUsec: 1373 } } CommitVersion { Step: 230 TxId: 104 } 2026-02-19T08:25:05.656689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 230 OrderId: 104 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1373 } } CommitVersion { Step: 230 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 2026-02-19T08:25:05.657318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 942 RawX2: 4294970189 } Origin: 72075186233409549 State: 2 TxId: 104 Step: 0 Generation: 2 2026-02-19T08:25:05.657377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2026-02-19T08:25:05.657504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 942 RawX2: 4294970189 } Origin: 72075186233409549 State: 2 TxId: 104 Step: 0 Generation: 2 2026-02-19T08:25:05.657559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:25:05.657652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 942 RawX2: 4294970189 } Origin: 72075186233409549 State: 2 TxId: 104 Step: 0 Generation: 2 2026-02-19T08:25:05.657732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:05.657778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard 
schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:25:05.657814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:25:05.657866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 ProposedWaitParts -> Done 2026-02-19T08:25:05.661207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:25:05.663094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:25:05.663214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:25:05.663352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:25:05.663656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:25:05.663710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:25:05.663812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:05.663850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:05.663890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:05.663922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:05.663974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-02-19T08:25:05.664055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:757:2745] message: TxId: 104 2026-02-19T08:25:05.664100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:05.664159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:25:05.664214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:25:05.664329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:25:05.666318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:25:05.666365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:968:2911] TestWaitNotification: OK eventTxId 104 2026-02-19T08:25:05.666968Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:05.667196Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 247us result status StatusSuccess 2026-02-19T08:25:05.667713Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 230 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 39 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:05.668470Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:05.668745Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 274us result status StatusSuccess 2026-02-19T08:25:05.669176Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 230 
ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 39 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> TSchemeShardSubDomainTest::LS [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:02.956938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:02.957018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:02.957100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:02.957136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:02.957167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:02.957203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:02.957231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:02.957277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:02.957344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:02.957437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:02.958262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:02.958580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:03.046672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:03.046750Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:03.061823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:03.062555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:03.062751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:03.074270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:03.075029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:03.075762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:03.076091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:03.079309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:03.079516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:03.080759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:03.080866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:03.080987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:03.081035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:03.081075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:03.081247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:03.213333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.214255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.214384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.214462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.214609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.214710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.214783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.214862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.214967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.215044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.215104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.215164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.215227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:03.215324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 20273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-02-19T08:25:07.020327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-02-19T08:25:07.020493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-02-19T08:25:07.020647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-02-19T08:25:07.020698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-02-19T08:25:07.020745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-02-19T08:25:07.021104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:07.021146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-02-19T08:25:07.021222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:07.021254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-02-19T08:25:07.021293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 ProposedWaitParts -> Done 2026-02-19T08:25:07.021967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:07.022043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:07.022095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation 
in-flight, at schemeshard: 72075186233409546, txId: 104 2026-02-19T08:25:07.022128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2026-02-19T08:25:07.022158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-02-19T08:25:07.022878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:07.022932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:07.022952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-02-19T08:25:07.022987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-02-19T08:25:07.023015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-02-19T08:25:07.023067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-02-19T08:25:07.026857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:07.026906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-02-19T08:25:07.027171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-02-19T08:25:07.027322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:07.027370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:07.027403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:07.027429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:07.027459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-02-19T08:25:07.027507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:993:2931] message: TxId: 104 2026-02-19T08:25:07.027542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 
104 ready parts: 1/1 2026-02-19T08:25:07.027574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:25:07.027601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:25:07.027689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-02-19T08:25:07.028176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-02-19T08:25:07.028217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-02-19T08:25:07.028799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-02-19T08:25:07.029672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-02-19T08:25:07.030776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-02-19T08:25:07.030815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-02-19T08:25:07.030875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:25:07.030923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1414:3330] 2026-02-19T08:25:07.031500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-02-19T08:25:07.032900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-02-19T08:25:07.033102Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 226us result status StatusSuccess 2026-02-19T08:25:07.033521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } 
Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSchemeShardSubDomainTest::SimultaneousDeclare >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:06.350959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:06.351045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:06.351107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:06.351145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:06.351179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:06.351215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:06.351245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:06.351298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:06.351366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:06.351464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:06.352262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:06.352583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:06.443451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:06.443510Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:06.456386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:06.457066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:06.457252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:06.468691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:06.469413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:06.470201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:06.470499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:06.473453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:06.473671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:06.474861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:06.474924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:06.475054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-02-19T08:25:06.475098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:06.475139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:06.475324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:06.635275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.638992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.639966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.640047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 08:25:07.366911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:25:07.366965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-02-19T08:25:07.367264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:25:07.367321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:25:07.367432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:25:07.367471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:25:07.367506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:25:07.367533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:25:07.367569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:25:07.367622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:25:07.367663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:25:07.367693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:25:07.367858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-02-19T08:25:07.367906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:25:07.367944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-02-19T08:25:07.367974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-02-19T08:25:07.368741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:07.368819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:07.368861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, 
at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:25:07.368921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:25:07.368964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:07.369522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:07.369595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:07.369621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:25:07.369647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-02-19T08:25:07.369688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:25:07.369775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:25:07.372601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:07.373515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2026-02-19T08:25:07.373819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:25:07.373865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-02-19T08:25:07.374305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:25:07.374389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:07.374424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:897:2851] TestWaitNotification: OK eventTxId 100 2026-02-19T08:25:07.374904Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:07.375126Z node 1 
:SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 213us result status StatusSuccess 2026-02-19T08:25:07.375569Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 39 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:07.376187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:07.376368Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 181us result status StatusSuccess 2026-02-19T08:25:07.376811Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "USER_0" 
PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DstCreator::ColumnTypeMismatch [GOOD] >> TSchemeShardSubDomainTest::Redefine >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::CopyRejects >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2026-02-19T08:25:00.505830Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489470382055575:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:25:00.505926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:25:00.770075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:00.770182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:00.793022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:25:00.827345Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:00.831695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:00.832016Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489470382055543:2081] 1771489500503981 != 1771489500503984 2026-02-19T08:25:00.976086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:25:01.147010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:01.147042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:01.147051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:01.147163Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:01.433338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:01.446555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:25:01.464959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:01.538507Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:01.637484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:01.691923Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:25:01.691954Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:25:01.692588Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:25:03.680940Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489501607, tx_id: 281474976710658 } } } 2026-02-19T08:25:03.681381Z node 1 
:REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:03.724880Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:25:03.736407Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489501719 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 
16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:25:03.736698Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication mode mismatch: expected: REPLICATION_MODE_READ_ONLY, got: 0 2026-02-19T08:25:04.470981Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489488237309323:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:25:04.477147Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:25:04.501779Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:25:04.578004Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:04.579529Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489488237309292:2081] 1771489504469045 != 1771489504469048 2026-02-19T08:25:04.613898Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:04.613978Z node 2 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:04.618082Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:04.689423Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:25:04.834943Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:04.834972Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:04.834980Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:04.835052Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:05.126439Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:05.132913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:05.181252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:05.226512Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:25:05.226539Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:25:05.227086Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:25:05.476193Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:07.480817Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, 
owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489505212, tx_id: 281474976715658 } } } 2026-02-19T08:25:07.481085Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:07.482497Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:25:07.483524Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1771489505254 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:25:07.483732Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 >> IncrementalBackup::IncrementalBackupWithIndexes [GOOD] >> IncrementalBackup::IncrementalBackupWithCoveringIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:07.660872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:07.660967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:07.661036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:07.661076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:07.661108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:07.661144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:07.661171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:07.661225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:07.661285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:07.661359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:07.662819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:07.663135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:07.747101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:07.747161Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:07.758332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:07.759017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:07.759233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:07.770287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:07.771051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:07.771710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:07.772009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:07.774839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2026-02-19T08:25:07.775017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:07.776150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:07.776215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:07.776338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:07.776375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:07.776411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:07.776558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:07.925321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.926293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.926437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.926514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.926657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.926733Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.926815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.926896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.926995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.927073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.927134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.927196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.927263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:07.927378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:08.539978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:08.540069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.540107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-02-19T08:25:08.540184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-02-19T08:25:08.540428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-02-19T08:25:08.540477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 100:0 ProgressState 2026-02-19T08:25:08.540568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:08.540601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:08.540647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:08.540678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:08.540712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-02-19T08:25:08.540746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:08.540778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-02-19T08:25:08.540822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 100:0 2026-02-19T08:25:08.540886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:08.540924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-02-19T08:25:08.540960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:25:08.540992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:25:08.541643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:08.541741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:08.541777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:08.541813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:25:08.541903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:08.542578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:08.542676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:08.542703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:08.542727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:25:08.542751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:08.542817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-02-19T08:25:08.547633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-02-19T08:25:08.548464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult 
got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-02-19T08:25:08.548645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:25:08.548691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-02-19T08:25:08.548792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:08.548808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:25:08.549164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:25:08.549250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:08.549274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:717:2705] 2026-02-19T08:25:08.549368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:08.549407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:08.549421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:717:2705] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-02-19T08:25:08.549767Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:08.549985Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 236us result status StatusSuccess 2026-02-19T08:25:08.550474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:08.550957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:08.551087Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 152us result status StatusPathDoesNotExist 2026-02-19T08:25:08.551204Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> IncrementalBackup::ShopDemoIncrementalBackupScenario [GOOD] >> IncrementalBackup::VerifyIncrementalBackupTableAttributes >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2026-02-19T08:25:00.394581Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489468248141447:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:25:00.395723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:25:00.601782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:25:00.624389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:00.624471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:00.685759Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7608489468248141397:2081] 1771489500392703 != 1771489500392706 2026-02-19T08:25:00.697856Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:00.702110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:00.869799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:25:01.112995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:01.113021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:01.113027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:01.113104Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:01.402166Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:01.449535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:01.454283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:25:01.457297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:01.558644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:01.595183Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:25:01.595227Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:25:01.595749Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:25:03.585837Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489501558, tx_id: 281474976710658 } } } 2026-02-19T08:25:03.586303Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:03.622135Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:25:03.624672Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489501628 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:25:03.624875Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Columns size mismatch: expected: 2, got: 3 2026-02-19T08:25:04.401069Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:25:04.401196Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:04.461039Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:04.463438Z node 2 :CONFIGS_DISPATCHER 
ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489488360964403:2081] 1771489504353798 != 1771489504353801 2026-02-19T08:25:04.471465Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:04.471535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:04.494214Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:04.687028Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:25:04.698589Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:04.698618Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:04.698631Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:04.698746Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:05.053120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:05.060216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:05.098864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:05.138832Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:25:05.138861Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:25:05.139457Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:25:05.388311Z node 
2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:07.702905Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489505135, tx_id: 281474976710658 } } } 2026-02-19T08:25:07.703207Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:07.704989Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:25:07.706240Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489505170 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:25:07.706482Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:04.397044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single 
parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:04.397128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:04.397194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:04.397243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:04.397281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:04.397317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:04.397354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:04.397397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:04.397463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:04.397535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:04.398337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:04.398956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:04.488927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:04.488987Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:04.500784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:04.501368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:04.501558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:04.512701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:04.513279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:04.513828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:04.514091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 2026-02-19T08:25:04.516541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:04.516708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:04.517729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:04.517781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:04.517887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:04.517936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:04.517977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:04.518202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:04.652658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.653618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.653761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.653835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.653957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654566Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:04.654653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... Id: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:08.884220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:08.884820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:08.884885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:08.885076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:25:08.885245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.885286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:25:08.885332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:25:08.885832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:08.885892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:25:08.885971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:08.886019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-02-19T08:25:08.886075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 ProposedWaitParts -> Done 2026-02-19T08:25:08.887149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 
Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:08.887263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:08.887307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:08.887350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-02-19T08:25:08.887395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:08.888159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:08.888239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:08.888287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:08.888324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:25:08.888354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:25:08.888427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:25:08.891725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:08.891781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:25:08.892223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:25:08.892418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:08.892461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:08.892501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:08.892537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 
2026-02-19T08:25:08.892571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:25:08.892639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:811:2776] message: TxId: 103 2026-02-19T08:25:08.892683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:08.892725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:25:08.892761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:25:08.892875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:25:08.893617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:08.893659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:08.894556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:08.895017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:08.896260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.896315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-02-19T08:25:08.896398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:25:08.896437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1063:2994] 2026-02-19T08:25:08.897207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-02-19T08:25:08.898116Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:08.898329Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 236us result status StatusSuccess 2026-02-19T08:25:08.898831Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] >> TSchemeShardSubDomainTest::Redefine [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:08.433654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:08.434319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:08.434407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, 
ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:08.434443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:08.434477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:08.434517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:08.434546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:08.434604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:08.434701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:08.434791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:08.435633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:08.435946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:08.513984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:08.514031Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:08.525500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:08.526318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:08.526502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:08.538405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:08.539126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:08.539858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:08.540133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:08.543209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.543387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:08.544809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:08.544868Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.544981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:08.545021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:08.545059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:08.545220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:08.701213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.702160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.702282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.702374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.702494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.702558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.702630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.702928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.703019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.703108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.703175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.703233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.703297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.703398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... natorID: 72057594046316545 2026-02-19T08:25:09.294154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2026-02-19T08:25:09.294270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2026-02-19T08:25:09.294527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:09.294631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:09.294664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-02-19T08:25:09.294822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 100:0 Propose -> Done 2026-02-19T08:25:09.294948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-02-19T08:25:09.295052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:09.295092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:09.295124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2026-02-19T08:25:09.296658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:09.296684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:09.296785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:09.296871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:09.296912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-02-19T08:25:09.296955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-02-19T08:25:09.297181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.297212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 100:0 ProgressState 2026-02-19T08:25:09.297279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:09.297304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:09.297342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:09.297363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:09.297391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-02-19T08:25:09.297416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:09.297447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-02-19T08:25:09.297473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 100:0 2026-02-19T08:25:09.297512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:09.297549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2026-02-19T08:25:09.297576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:25:09.297592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:25:09.298066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:09.298151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:09.298182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, 
txId: 100 2026-02-19T08:25:09.298233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:25:09.298266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:09.298738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:09.298804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:09.298833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:09.298851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:25:09.298870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:09.298956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2026-02-19T08:25:09.298992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:677:2665] 2026-02-19T08:25:09.301324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-02-19T08:25:09.302252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-02-19T08:25:09.302312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:09.302339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:678:2666] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2026-02-19T08:25:09.302796Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:09.303015Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 224us result status StatusSuccess 2026-02-19T08:25:09.303458Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> KqpBatchDelete::Large_3 [GOOD] >> TSchemeShardSubDomainTest::CreateAndWait >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:08.446029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:08.446113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:08.446203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:08.446241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:08.446277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:08.446316Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:08.446345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:08.446397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:08.446516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:08.446605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:08.447467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:08.447789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:08.527980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:08.528029Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:08.539861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:08.540732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:08.540905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:08.552082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:08.552748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:08.553409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:08.553661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:08.556628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.556851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:08.558075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:08.558144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.558284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:08.558336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:08.558379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:08.558587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:08.695083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.696935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.697006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.697076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:08.697158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
rId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:25:09.493801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.493907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.494066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:09.494124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:09.494157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:09.494179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:09.494293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.494377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.494576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2026-02-19T08:25:09.494946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.495082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.495544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.495624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.495945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.496115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.496163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.496268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.496483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.496593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.497010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.497318Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.497420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.497479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.497635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.497690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.497778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.501929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:09.506803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:09.506870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:09.507508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:09.507567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:09.507628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:09.510954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2026-02-19T08:25:09.578342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:25:09.578422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:857:2808] sender: [1:922:2058] recipient: [1:15:2062] 2026-02-19T08:25:09.579234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:25:09.579332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:09.579387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:920:2856] TestWaitNotification: OK eventTxId 100 2026-02-19T08:25:09.579915Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:09.580190Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" 
took 276us result status StatusSuccess 2026-02-19T08:25:09.580643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:09.581334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:09.581517Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 313us result status StatusSuccess 2026-02-19T08:25:09.582001Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 
5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2026-02-19T08:25:01.807542Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489472188675484:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:25:01.808286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:25:01.995601Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:25:02.013674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:02.013946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:02.063734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:02.068221Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:02.069259Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489472188675440:2081] 1771489501799066 != 1771489501799069 2026-02-19T08:25:02.284778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:25:02.326502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:02.326527Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:02.326535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:02.326645Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:02.585982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:02.595427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:02.709172Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:25:02.709193Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:25:02.709781Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:25:02.808511Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:04.898541Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489502692, tx_id: 281474976710658 } } } 2026-02-19T08:25:04.898854Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:04.900365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:04.901117Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-02-19T08:25:04.901133Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-02-19T08:25:04.929307Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: 
[DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-02-19T08:25:04.929336Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:25:05.754898Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608489491500694274:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:25:05.755067Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:25:05.770409Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:25:05.842666Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:05.844830Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608489491500694241:2081] 1771489505753754 != 1771489505753757 2026-02-19T08:25:05.856570Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:05.856630Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:05.877169Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:06.040109Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:25:06.054133Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:06.054159Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:06.054165Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:06.054263Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:06.355181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:06.364532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:06.398272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:06.439450Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-02-19T08:25:06.439473Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-02-19T08:25:06.439989Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-02-19T08:25:06.761155Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:08.911522Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489506437, tx_id: 281474976710658 } } } 2026-02-19T08:25:08.911780Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-02-19T08:25:08.944112Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-02-19T08:25:08.947519Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1771489506472 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 
CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:25:08.947805Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:08.850859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:08.850960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:08.851046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:08.851099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:08.851142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:08.851190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:08.851271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:08.851330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:08.851422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:08.851518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:08.852605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:08.852998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:08.946489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:08.946546Z 
node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:08.959181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:08.959879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:08.960074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:08.971758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:08.972512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:08.973329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:08.973650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:08.976922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.977129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:08.978455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:08.978523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:08.978661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:08.978708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:08.978761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:08.978966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.138018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.139932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.140008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.140089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.140194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
eshard: 72057594046678944, txId: 106 2026-02-19T08:25:10.018593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:25:10.018648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:25:10.018729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-02-19T08:25:10.021173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2026-02-19T08:25:10.021309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:10.021356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:10.021383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:10.022647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:25:10.023257Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-02-19T08:25:10.024383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:10.024746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:25:10.025410Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-02-19T08:25:10.025591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:25:10.026129Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2026-02-19T08:25:10.026405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:10.026677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 Forgetting tablet 72075186233409547 2026-02-19T08:25:10.027607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:10.027783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:25:10.028655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:10.028728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:10.028893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:25:10.031017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:10.031077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:10.031158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:10.032396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:10.032452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:10.033796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:10.033845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:10.034199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:10.034239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:10.034645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:10.037773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 104 
2026-02-19T08:25:10.038195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:25:10.038246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:25:10.038887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:25:10.038993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:25:10.039031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1023:2977] TestWaitNotification: OK eventTxId 104 2026-02-19T08:25:10.039795Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:10.040051Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 253us result status StatusPathDoesNotExist 2026-02-19T08:25:10.040250Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:10.041009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:10.041202Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 208us result status StatusSuccess 2026-02-19T08:25:10.041700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.6%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:06.605204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:06.605270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:06.605377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:06.605445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:06.605496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:06.605539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:06.605567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:06.605621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:06.605685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:06.605784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:06.606628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:06.606974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:06.687796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:06.687859Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:06.699666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:06.700360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:06.700537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:06.711595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:06.712326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:06.713046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:06.713345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:06.716343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:06.716547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:06.717725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:06.717789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:06.717921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:06.717963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-02-19T08:25:06.718001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:06.718186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:06.851612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:25:06.852704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.852994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.853055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
86233409546 2026-02-19T08:25:10.142292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:10.142527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:25:10.142786Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 Forgetting tablet 72075186233409546 2026-02-19T08:25:10.144181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2026-02-19T08:25:10.144396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 52] was 3 2026-02-19T08:25:10.145101Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 Forgetting tablet 72075186233409556 2026-02-19T08:25:10.146462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2026-02-19T08:25:10.146693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 52] was 2 2026-02-19T08:25:10.146886Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-02-19T08:25:10.148091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2026-02-19T08:25:10.148346Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 2026-02-19T08:25:10.149660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 Forgetting tablet 72075186233409555 2026-02-19T08:25:10.150198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2026-02-19T08:25:10.150384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409557 2026-02-19T08:25:10.151129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2026-02-19T08:25:10.151311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 52] was 1 2026-02-19T08:25:10.151730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:10.151814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 52], at schemeshard: 72057594046678944 2026-02-19T08:25:10.151888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:10.152207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:10.152286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:10.152416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:10.155155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:10.155198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:10.155396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:15 2026-02-19T08:25:10.155429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2026-02-19T08:25:10.155956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:14 2026-02-19T08:25:10.155997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2026-02-19T08:25:10.157770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:10.157806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:10.157868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:16 2026-02-19T08:25:10.157898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 
72057594046678944:16 tabletId 72075186233409557 2026-02-19T08:25:10.158054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:10.158158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:10.158232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:10.158268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:10.158337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:10.159840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2026-02-19T08:25:10.160574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2026-02-19T08:25:10.160613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2026-02-19T08:25:10.161443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2026-02-19T08:25:10.161579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2026-02-19T08:25:10.161634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2542:4324] TestWaitNotification: OK eventTxId 139 2026-02-19T08:25:10.162931Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:10.163113Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 198us result status StatusSuccess 2026-02-19T08:25:10.163523Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 4 MaxPaths: 5 MaxChildrenInDir: 4 MaxAclBytesSize: 25 MaxTableColumns: 3 MaxTableColumnNameLength: 10 MaxTableKeyColumns: 1 MaxTableIndices: 20 MaxShards: 6 MaxShardsInPath: 4 MaxConsistentCopyTargets: 1 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 20 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::SimultaneousDefine >> IncrementalBackup::BasicIndexIncrementalRestore [GOOD] >> IncrementalBackup::CdcVersionSync >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::Delete >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 [GOOD] Test command err: 2026-02-19T08:24:10.718294Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489256552019017:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:10.718389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:10.895222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:10.918033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:10.918168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:10.983652Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:10.988129Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489256552018986:2081] 1771489450716808 != 1771489450716811 2026-02-19T08:24:11.003094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-02-19T08:24:11.076693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:11.076722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:11.076735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:11.076828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:11.120241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:13.659029Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006d8/r3tmp/spilling-tmp-runner/node_1_3585d997-f6f1bda5-e864dc90-443aa780, actor: [1:7608489269436922706:2384] 2026-02-19T08:24:13.659253Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006d8/r3tmp/spilling-tmp-runner 2026-02-19T08:24:15.784039Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784085Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784105Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784126Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784143Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784168Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784193Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784217Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784240Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784273Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784299Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:15.784344Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtfzt7f1cq2sds502a30w2w, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODI2MTJjODctOThkNjQ2NGEtNWM3NDA3MjQtMjI2MjUyZmE=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:15.784500Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489278026858383:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzt7f1cq2sds502a30w2w, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODI2MTJjODctOThkNjQ2NGEtNWM3NDA3MjQtMjI2MjUyZmE=, PoolId: default, IsStreamingQuery: 0}. 
Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:15.784530Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489278026858383:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzt7f1cq2sds502a30w2w, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODI2MTJjODctOThkNjQ2NGEtNWM3NDA3MjQtMjI2MjUyZmE=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:15.784785Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:15.784904Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.784945Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.784998Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.785064Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.785124Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.785170Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.785211Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.785251Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.785291Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:15.785326Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... 7608489507222156052:2743] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1218 Tasks { TaskId: 1 CpuTimeUs: 500 FinishTimeMs: 1771489509421 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" AffectedPartitions: 2 } ComputeCpuTimeUs: 48 BuildCpuTimeUs: 452 WaitOutputTimeUs: 2262 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771489509421 CreateTimeMs: 1771489509348 UpdateTimeMs: 1771489509421 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:09.423240Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489507222156052:2743] 2026-02-19T08:25:09.423270Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489507222156054:2745], CA [4:7608489507222156055:2746], CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.423292Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [4:7608489507222156054:2745], CA [4:7608489507222156055:2746], CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.423374Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489507222156054:2745] TaskId# 3 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 352 Tasks { TaskId: 3 CpuTimeUs: 108 FinishTimeMs: 1771489509421 ComputeCpuTimeUs: 25 BuildCpuTimeUs: 83 WaitOutputTimeUs: 1560 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771489509421 CreateTimeMs: 1771489509349 UpdateTimeMs: 1771489509421 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:09.423399Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [4:7608489507222156054:2745], CA [4:7608489507222156055:2746], CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.423421Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [4:7608489507222156054:2745], CA [4:7608489507222156055:2746], CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.423507Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489507222156054:2745] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 575 DurationUs: 1000 Tasks { TaskId: 3 CpuTimeUs: 110 FinishTimeMs: 1771489509422 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" AffectedPartitions: 2 } ComputeCpuTimeUs: 27 BuildCpuTimeUs: 83 WaitOutputTimeUs: 1560 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771489509421 CreateTimeMs: 1771489509349 UpdateTimeMs: 1771489509422 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:09.423528Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489507222156054:2745] 2026-02-19T08:25:09.423551Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489507222156055:2746], CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.423573Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:7608489507222156055:2746], CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.423650Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489507222156055:2746] TaskId# 4 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 387 Tasks { TaskId: 4 CpuTimeUs: 125 ComputeCpuTimeUs: 28 BuildCpuTimeUs: 97 HostName: "ghrun-fkrokuvqke" NodeId: 4 CreateTimeMs: 1771489509349 CurrentWaitInputTimeUs: 72186 UpdateTimeMs: 1771489509422 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:09.423671Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489507222156055:2746], CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.423692Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:7608489507222156055:2746], CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.441409Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489507222156055:2746] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 876 Tasks { TaskId: 4 CpuTimeUs: 172 FinishTimeMs: 1771489509440 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" AffectedPartitions: 2 } ComputeCpuTimeUs: 75 BuildCpuTimeUs: 97 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771489509440 CreateTimeMs: 1771489509349 UpdateTimeMs: 1771489509440 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:09.441477Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489507222156055:2746] 2026-02-19T08:25:09.441520Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.441558Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:7608489507222156056:2747], trace_id# 2026-02-19T08:25:09.441681Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. 
Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489507222156056:2747] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1851 DurationUs: 1000 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 895 FinishTimeMs: 1771489509441 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 185 BuildCpuTimeUs: 710 HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771489509440 CreateTimeMs: 1771489509350 UpdateTimeMs: 1771489509441 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:09.441703Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489507222156056:2747] 2026-02-19T08:25:09.441989Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:25:09.442025Z node 4 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:25:09.442057Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [4:7608489507222156045:2538] TxId: 281474976710706. Ctx: { TraceId: 01khtg1fja4mh68rhr0mmvb80x, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZGNmZmEyMzMtODNmMTMwMmUtMzU2ZDQyYTMtM2FiZjc3NDI=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.005962s ReadRows: 0 ReadBytes: 0 RequestUnits# 3 ForceFlag# true trace_id# >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] >> TSchemeShardSubDomainTest::SchemeQuotas >> BasicUsage::KeyedWriteSession_MultiThreadedWrite_Acks [GOOD] >> BasicUsage::KeyedWriteSession_IdleSessionsTimeout >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:10.583744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:10.583815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:10.583892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:10.583920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:10.583947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:10.583996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:10.584019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:10.584054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:10.584101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:10.584164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:10.584904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:10.585267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:10.659103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:10.659172Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:10.676286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:10.677085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:10.677255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:10.690285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:10.691063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:10.691887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:10.692214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:10.695263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:10.695510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:10.696847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:10.696923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:10.697063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:10.697159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:10.697221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:10.697435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:10.835464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.836516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.836649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.836732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.836863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.836933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.837009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.837108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.837196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.837295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: 
EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.837375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.837467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.837539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:10.837632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:25:11.776615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:25:11.777148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:25:11.777536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:11.777585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:11.777743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:25:11.777993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:25:11.778032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-02-19T08:25:11.781394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:7 2026-02-19T08:25:11.781473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:11.781494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:11.781689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:11.781764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:11.782217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:11.782621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:11.782664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:11.782745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:11.782968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:25:11.782996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-02-19T08:25:11.783712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6237: Failed to connect, to tablet: 
72075186233409551, at schemeshard: 72057594046678944 2026-02-19T08:25:11.784269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:11.784312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:11.784367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:11.784401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:25:11.784583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:11.785985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 101 2026-02-19T08:25:11.786232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:11.786270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-02-19T08:25:11.786342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:25:11.786422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:25:11.786853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:11.786944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:11.786998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:1085:2994] 2026-02-19T08:25:11.787111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:25:11.787165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:11.787185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1085:2994] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-02-19T08:25:11.787772Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:11.787942Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 189us result status StatusPathDoesNotExist 2026-02-19T08:25:11.788103Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:11.788526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:11.788733Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 191us result status StatusPathDoesNotExist 2026-02-19T08:25:11.788853Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:11.789246Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:11.789385Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 145us result status StatusSuccess 2026-02-19T08:25:11.789801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 11 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:09.063112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:09.063209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:09.063280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:09.063319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:09.063359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:09.063399Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:09.063470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:09.063506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:09.063585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:09.063692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:09.064599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:09.064957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:09.142225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:09.142282Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:09.154943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:09.155625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:09.155810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:09.168250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:09.169024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:09.169615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:09.169868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:09.172691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:09.172929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:09.173911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:09.173962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:09.174054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:09.174091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:09.174123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:09.174273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:09.309611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.310699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.310838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.310901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:09.311518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
bleBarrier }, at tablet# 72057594046678944 2026-02-19T08:25:12.247176Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 106:0 Done -> Done 2026-02-19T08:25:12.249642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.249724Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 106:0 ProgressState 2026-02-19T08:25:12.249866Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:25:12.249908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:25:12.249949Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:25:12.250016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:25:12.250060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-02-19T08:25:12.250131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1053:2975] message: TxId: 106 2026-02-19T08:25:12.250191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:25:12.250235Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-02-19T08:25:12.250271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 106:0 2026-02-19T08:25:12.250414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-02-19T08:25:12.250462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:25:12.252551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:25:12.252612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:1230:3127] TestWaitNotification: OK eventTxId 106 2026-02-19T08:25:12.253367Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:12.253594Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 246us result status StatusSuccess 2026-02-19T08:25:12.254037Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 
101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:12.254810Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:12.255018Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 222us result status StatusSuccess 2026-02-19T08:25:12.255398Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 300 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst" Columns 
{ Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:12.256098Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:12.256274Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 198us result status StatusSuccess 2026-02-19T08:25:12.256687Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 300 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:11.674880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:11.674978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:11.675053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:11.675108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:11.675159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:11.675199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:11.675236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:11.675284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:11.675361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:11.675447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:11.676369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:11.676697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:11.770763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:11.770826Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:11.781990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:11.782721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:11.782876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:11.797919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:11.798802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:11.799660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:11.800045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:11.803948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:11.804176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:11.805461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:11.805528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:11.805671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:11.805741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:11.805794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:11.806024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:11.959238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.960985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.961056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.961115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.961190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.961282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
erId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:12.817408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:25:12.817611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:25:12.817651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-02-19T08:25:12.820614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:7 2026-02-19T08:25:12.820686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:12.820708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:12.820759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:12.820863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:12.820945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:12.821058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:12.821235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:12.821272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:12.823336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:25:12.823390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-02-19T08:25:12.823516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6237: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2026-02-19T08:25:12.823616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:12.823641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:12.823693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:12.823722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:25:12.823810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, 
skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:12.825173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 100 2026-02-19T08:25:12.825434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:25:12.825475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-02-19T08:25:12.825555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:12.825586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-02-19T08:25:12.825647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:25:12.825665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:25:12.826117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:25:12.826308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:12.826354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:12.826386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:1081:2990] 2026-02-19T08:25:12.826518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:25:12.826561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:12.826599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:1081:2990] 2026-02-19T08:25:12.826751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:12.826774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1081:2990] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-02-19T08:25:12.827261Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:12.827480Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusPathDoesNotExist 2026-02-19T08:25:12.827654Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:12.828120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:12.828264Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 143us result status StatusPathDoesNotExist 2026-02-19T08:25:12.828369Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:12.828926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:12.829070Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 144us result status StatusSuccess 2026-02-19T08:25:12.829478Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 11 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:12.024084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:12.024167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:12.024221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:12.024270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:12.024303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:12.024340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:12.024359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:12.024400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:12.024456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:12.024521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:12.025177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:12.025477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:12.108153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:12.108510Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:12.119969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:12.120613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:12.120797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:12.132084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:12.132929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:12.133695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:12.134000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:12.136894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:12.137081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:12.138330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:12.138396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:12.138515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:12.138560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:12.138611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:12.138816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.250942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.251586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.251679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.251790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.251938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.251993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.252033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.252103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.252173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.252232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.252271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.252305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.252345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.252415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
rd__operation_common_subdomain.cpp:84: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2026-02-19T08:25:12.928502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2026-02-19T08:25:12.928573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ConfigureParts -> Propose 2026-02-19T08:25:12.931456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.931623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.931744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.931806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.931850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2026-02-19T08:25:12.931908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-02-19T08:25:12.932047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:25:12.933820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-02-19T08:25:12.933949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000039 2026-02-19T08:25:12.934309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:12.934419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:12.934463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-02-19T08:25:12.934791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: 
Change state for txid 101:0 Propose -> Done 2026-02-19T08:25:12.934856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-02-19T08:25:12.935013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:12.935082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:25:12.937167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:12.937216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:12.937430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:12.937477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:25:12.937870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.937932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:25:12.938036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:12.938072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:12.938108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:12.938141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:12.938175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:25:12.938235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:12.938304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:25:12.938345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:25:12.938543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:25:12.938607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2026-02-19T08:25:12.938664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-02-19T08:25:12.939205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:12.939302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:12.939342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:12.939395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:25:12.939439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:12.939526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-02-19T08:25:12.939564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:710:2698] 2026-02-19T08:25:12.942857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:12.942973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:12.943013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:717:2705] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-02-19T08:25:12.943646Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:12.943856Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 247us result status StatusSuccess 2026-02-19T08:25:12.944317Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_43_Query [GOOD] >> IncrementalBackup::MultipleIndexesIncrementalRestore [GOOD] >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:12.304628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:12.304684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:12.304745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:12.304781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:12.304817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:12.304842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:12.304868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:12.304900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 
0.100000s 2026-02-19T08:25:12.304949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:12.304999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:12.305571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:12.305797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:12.363092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:12.363137Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:12.371932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:12.372487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:12.372637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:12.380816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:12.381406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:12.381950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:12.382180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:12.384598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:12.384766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:12.385904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:12.385982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:12.386083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:12.386128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:12.386163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:12.386331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.529963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.530932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.531938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:13.129698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:13.130646Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 2026-02-19T08:25:13.130792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2026-02-19T08:25:13.131107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-02-19T08:25:13.131714Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2026-02-19T08:25:13.131815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:13.132026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:25:13.132309Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2026-02-19T08:25:13.132451Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 2026-02-19T08:25:13.132554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:13.132715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:13.132960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-02-19T08:25:13.133108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:25:13.133318Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 
2026-02-19T08:25:13.133450Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2026-02-19T08:25:13.133564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:13.133774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:13.133982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:13.134120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:13.134157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:686:2674] 2026-02-19T08:25:13.134348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:25:13.134498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:13.134743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:13.134793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:13.134947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:13.135357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:13.135405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:13.135490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:13.138214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:25:13.138324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:13.138409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:13.141392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:25:13.141532Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:13.141610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:13.141785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:13.141986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-02-19T08:25:13.142661Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:13.142902Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 242us result status StatusPathDoesNotExist 2026-02-19T08:25:13.143102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:13.143528Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:13.143719Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 198us result status StatusSuccess 2026-02-19T08:25:13.144172Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:10.851871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:10.851945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:10.852000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:10.852049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:10.852080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:10.852124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:10.852166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:10.852196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:10.852250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:10.852334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:10.853087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:10.853365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:10.934798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:10.934854Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:10.946041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:10.946701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:10.946854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:10.957200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:10.957856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:10.958535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:10.958818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:10.961639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:10.961838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:10.963012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:10.963072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:10.963178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:10.963218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:10.963271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:10.963441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:11.098117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:11.099984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
EMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:13.052220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:13.052245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:13.053809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:25:13.053907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:25:13.054112Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-02-19T08:25:13.055489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:13.055785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:25:13.056394Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2026-02-19T08:25:13.056854Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-02-19T08:25:13.056997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:13.057247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 Forgetting tablet 72075186233409548 2026-02-19T08:25:13.059505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:13.059731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:25:13.060991Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 
72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-02-19T08:25:13.061125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:25:13.061319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 Forgetting tablet 72075186233409547 2026-02-19T08:25:13.062305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:13.062351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:25:13.062440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 Forgetting tablet 72075186233409549 2026-02-19T08:25:13.064004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:13.064067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:13.064208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:25:13.065002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:25:13.067723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:13.067781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:13.067869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:13.067893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:13.068020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:13.068047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:13.070134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:13.070189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:25:13.070412Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:13.070650Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:13.070774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:13.070830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:13.070942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:13.073526Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-02-19T08:25:13.076046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-02-19T08:25:13.076095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-02-19T08:25:13.076626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:25:13.076728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:25:13.076772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:1043:2997] TestWaitNotification: OK eventTxId 106 2026-02-19T08:25:13.094729Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:13.094943Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 275us result status StatusPathDoesNotExist 2026-02-19T08:25:13.095131Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-02-19T08:25:13.095960Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:13.096157Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 221us result status StatusPathDoesNotExist 2026-02-19T08:25:13.096290Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete >> TxUsage::WriteToTopic_Demo_14_Query [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> TSchemeShardSubDomainTest::DeclareAndDelete >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:12.297004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:12.297076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:12.297144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:12.297194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:12.297228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:12.297263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:12.297306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:12.297330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:12.297397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:12.297466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:12.298409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:12.298734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:12.371502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:12.371556Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:12.380462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:12.380979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:12.381130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:12.389105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:12.389700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:12.390261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:12.390534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:12.393222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:12.393405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:12.394461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:12.394517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:12.394636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-02-19T08:25:12.394673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:12.394703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:12.394846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:12.513417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:12.514909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 6: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:25:14.796379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-02-19T08:25:14.796767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:25:14.796813Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:25:14.796925Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:25:14.796963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:25:14.797005Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:25:14.797058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:25:14.797103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:25:14.797145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:25:14.797203Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:25:14.797250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:25:14.797476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 9 2026-02-19T08:25:14.797535Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:25:14.797573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-02-19T08:25:14.797606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-02-19T08:25:14.798179Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:14.798244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:14.798274Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:25:14.798305Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-02-19T08:25:14.798358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:14.799088Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:14.799166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:14.799194Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:25:14.799225Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-02-19T08:25:14.799254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 8 2026-02-19T08:25:14.799339Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:25:14.802802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:14.803172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:25:14.803478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:25:14.803566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:25:14.803966Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:25:14.804055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:14.804096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1369:3203] TestWaitNotification: OK eventTxId 102 2026-02-19T08:25:14.805189Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:14.805358Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 
72057594046678944 describe path "/MyRoot/USER_0" took 747us result status StatusSuccess 2026-02-19T08:25:14.805807Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 39 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:14.806356Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:14.806505Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 172us result status StatusSuccess 2026-02-19T08:25:14.806956Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } 
Children { Name: "USER_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> TxUsage::WriteToTopic_Demo_24_Query [GOOD] >> TxUsage::WriteToTopic_Demo_45_Table >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:13.164343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:13.164419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:13.164476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:13.164535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:13.164585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:13.164624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:13.164667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:13.164700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:13.164765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:13.164840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:13.165819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:13.166137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:13.253426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:13.253482Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:13.266344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:13.267106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:13.267411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:13.280060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:13.280793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:13.281614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:13.281959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:13.285302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:13.285497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:13.286675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:13.286743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:13.286871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:13.286922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:13.286962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:13.287138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:13.430581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.431647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.431790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.431865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.431943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.432728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
395623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:25:15.395664Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:25:15.395702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:25:15.395734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:25:15.396379Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:15.396458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:15.396490Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:25:15.396531Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:25:15.396588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:15.397247Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:15.397319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:15.397344Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:25:15.397367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:25:15.397392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:15.397451Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:25:15.399213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at 
schemeshard 72057594046678944 2026-02-19T08:25:15.399321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:15.399357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:15.399383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:15.401029Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-02-19T08:25:15.401341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:15.401598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:25:15.401829Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-02-19T08:25:15.402297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:15.402486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409548 2026-02-19T08:25:15.404528Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2026-02-19T08:25:15.404790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:15.405454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:15.405564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:15.405757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:15.406012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:15.406059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:15.406174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:15.407040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:15.407089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:15.407167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:15.409168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:15.409218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:15.409557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:15.409585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:15.410731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:15.410783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:15.410855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:15.411022Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:25:15.411304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:25:15.411353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:25:15.411719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:25:15.411801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:15.411832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:923:2878] TestWaitNotification: OK eventTxId 102 
2026-02-19T08:25:15.412394Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:15.412561Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 182us result status StatusPathDoesNotExist 2026-02-19T08:25:15.412725Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:13.332966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:13.333057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:13.333129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:13.333202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:13.333254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:13.333296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:13.333342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:13.333371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:13.333447Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:13.333533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:13.334654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:13.334998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:13.432227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:13.432386Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:13.445973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:13.446726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:13.446916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:13.458347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:13.458919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:13.459503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:13.459797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:13.463613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:13.463787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:13.464898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:13.464968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:13.465082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:13.465118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:13.465148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:13.465292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:13.607435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.608393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.608518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.608585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.608718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.608787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.608869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.608953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.609038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.609129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.609203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.609275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.609360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.609430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
p:5681: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:15.433911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5697: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:15.434030Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 Propose -> ProposedDeleteParts 2026-02-19T08:25:15.434153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:15.434218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:15.434715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:15.435802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:25:15.437192Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:15.437232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:15.437367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:15.437502Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:15.437551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:25:15.437602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:25:15.437901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:15.437949Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:420: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2026-02-19T08:25:15.438003Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:15.438042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:15.438084Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:15.438117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:15.438156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:25:15.438196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:15.438236Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:25:15.438273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:25:15.438357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:15.438400Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:25:15.438455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:25:15.438494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:25:15.439206Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:15.439286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:15.439322Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:15.439367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:25:15.439417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:15.440176Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:15.440259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:15.440287Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:15.440319Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:25:15.440349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:15.440431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:25:15.440699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:15.440753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:15.440854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:15.441506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:15.441556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:15.441631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:15.443875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:15.445129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:15.445269Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:15.445348Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:25:15.445549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:15.445591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:25:15.446004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:15.446092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:15.446133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:749:2737] TestWaitNotification: OK eventTxId 101 2026-02-19T08:25:15.446623Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:15.446792Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 225us result status StatusPathDoesNotExist 2026-02-19T08:25:15.446952Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:12.930260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:12.930343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:12.930425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:12.930464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:12.930499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:12.930536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:12.930580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:12.930624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:12.930710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:12.930786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:12.931611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:12.931908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:13.018655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:13.018719Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:13.030843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:13.031524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:13.031703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:13.043019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:13.043743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:13.044447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:13.044757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:13.047689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:13.047876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:13.049067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:13.049149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:13.049266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:13.049310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:13.049349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:13.049539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:13.182401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.183382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.183513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.183589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.183705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.183787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.183854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.183931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.184014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.184099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.184169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.184240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.184312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.184401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
02-19T08:25:15.728466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 3 2026-02-19T08:25:15.730474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 46, at schemeshard: 72057594046678944 2026-02-19T08:25:15.730712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2026-02-19T08:25:15.730952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:15.730988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:15.731163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 46] 2026-02-19T08:25:15.731235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:15.731267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1395:3254], at schemeshard: 72057594046678944, txId: 137, path id: 38 2026-02-19T08:25:15.731308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1395:3254], at schemeshard: 72057594046678944, txId: 137, path id: 46 2026-02-19T08:25:15.731358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2026-02-19T08:25:15.731394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2026-02-19T08:25:15.731602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-02-19T08:25:15.732575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 137 2026-02-19T08:25:15.732654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 137 2026-02-19T08:25:15.732686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at 
schemeshard: 72057594046678944, txId: 137 2026-02-19T08:25:15.732740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 15 2026-02-19T08:25:15.732793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 12 2026-02-19T08:25:15.733595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2026-02-19T08:25:15.733668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2026-02-19T08:25:15.733696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2026-02-19T08:25:15.733803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 46], version: 1 2026-02-19T08:25:15.733833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 4 2026-02-19T08:25:15.733890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2026-02-19T08:25:15.736195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2026-02-19T08:25:15.736327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2026-02-19T08:25:15.736359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2026-02-19T08:25:15.736608Z node 1 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-02-19T08:25:15.736761Z node 1 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2026-02-19T08:25:15.736843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle 
TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-02-19T08:25:15.736875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2026-02-19T08:25:15.736973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-02-19T08:25:15.737039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-02-19T08:25:15.737114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:182: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-02-19T08:25:15.737198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 137:0 CreateParts -> ConfigureParts 2026-02-19T08:25:15.738171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2026-02-19T08:25:15.739776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2026-02-19T08:25:15.741318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2026-02-19T08:25:15.741593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2026-02-19T08:25:15.741642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2026-02-19T08:25:15.741702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2026-02-19T08:25:15.742048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1341 RawX2: 4294970508 } TxBody: "\n\236\004\n\007Table11\020.\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 
\020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020.:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 38 2026-02-19T08:25:15.744923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2026-02-19T08:25:15.745046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> LocalPartition::WithoutPartitionWithSplit >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon >> TxUsage::WriteToTopic_Demo_27_Table >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer [GOOD] >> BasicUsage::ConflictingWrites ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:15.382846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:15.382931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:15.382989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:15.383038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:15.383079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:15.383118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-02-19T08:25:15.383148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:15.383189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:15.383259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:15.383330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:15.384201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:15.384484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:15.476083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:15.476142Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:15.488218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:15.488906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:15.489074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:15.500127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:15.500834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:15.501543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:15.501822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:15.504786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:15.504960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:15.506055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:15.506119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:15.506257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:15.506305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:15.506345Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:15.506485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:15.633898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.634922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635519Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.635986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
72057594046678944 PathId: 39 } 2026-02-19T08:25:16.455908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:16.456498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:16.457963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:7 msg type: 268697601 2026-02-19T08:25:16.458104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2026-02-19T08:25:16.458154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:7, partId: 0 2026-02-19T08:25:16.458420Z node 1 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 7 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 39 } ObjectId: 40 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 39 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 7 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 39 } ObjectId: 40 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 39 } 2026-02-19T08:25:16.458619Z node 1 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 7, type DataShard, boot OK, tablet id 72075186233409552 2026-02-19T08:25:16.458716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 7 TabletID: 72075186233409552 Origin: 72057594037968897 2026-02-19T08:25:16.458751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:7, partId: 0 2026-02-19T08:25:16.458844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 7 TabletID: 72075186233409552 Origin: 72057594037968897 2026-02-19T08:25:16.458888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-02-19T08:25:16.458933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:182: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 7 TabletID: 72075186233409552 Origin: 72057594037968897 2026-02-19T08:25:16.459019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 CreateParts -> ConfigureParts 2026-02-19T08:25:16.461367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at 
schemeshard: 72057594046678944 2026-02-19T08:25:16.461589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:16.461651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 103:0 ProgressState at tabletId# 72057594046678944 2026-02-19T08:25:16.461758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 103:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409552 seqNo: 2:1 2026-02-19T08:25:16.502827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 103:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409552 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 129 RawX2: 4294969449 } TxBody: "\n\240\004\n\007table_0\020(\032\017\n\005RowId\030\004 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/table_0\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020(:\004\010\002\020\001" TxId: 103 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } SubDomainPathId: 39 2026-02-19T08:25:16.517089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409552 cookie: 72057594046678944:7 msg type: 269549568 2026-02-19T08:25:16.517243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409552 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-02-19T08:25:16.571097Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:16.571373Z node 1 :SCHEMESHARD_DESCRIBE 
INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 273us result status StatusSuccess 2026-02-19T08:25:16.571880Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: false CreateTxId: 103 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 39 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:16.572593Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:16.572761Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 168us result status StatusSuccess 2026-02-19T08:25:16.573041Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: false CreateTxId: 103 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 1 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 1 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 39 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D5F48904100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:13.590054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:13.590118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:13.590171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:13.590223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:13.590251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:13.590279Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:13.590308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:13.590343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:13.590426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:13.590518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:13.591214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:13.591471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:13.675745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:13.675794Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:13.694319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:13.695013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:13.695219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:13.707374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:13.708052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:13.708780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:13.709097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:13.712267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:13.712471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:13.713758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:13.713829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:13.713947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:13.713995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:13.714038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:13.714222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:13.865321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.866467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.866633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.866712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.866843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.866910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.866974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.867062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.867167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.867265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.867360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.867425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.867498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:13.867588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: E ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 Leader for TabletID 72057594046678944 is [1:889:2836] sender: [1:962:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 2026-02-19T08:25:17.839444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 2026-02-19T08:25:17.839636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 0 2026-02-19T08:25:17.839745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-02-19T08:25:17.839799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:17.839916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-02-19T08:25:17.839977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-02-19T08:25:17.840024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:17.840097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D5F48910100, stats written 2 2026-02-19T08:25:17.840479Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:17.840647Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 167us result status StatusSuccess 2026-02-19T08:25:17.841009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165424 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:16.764058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:16.764175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.764246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:16.764285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:16.764325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:16.764365Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:16.764428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:16.764464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:16.764535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.764677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:16.765571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:16.765926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:16.848727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:16.848778Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:16.859256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:16.859805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:16.859952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:16.868962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:16.869460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:16.870063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:16.870305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:16.872549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.872709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:16.873966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:16.874044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.874182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:16.874231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:16.874278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:16.874482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:16.986303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.987828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:18.907519Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:420: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2026-02-19T08:25:18.907613Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:18.907651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:18.907702Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:18.907739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:18.907782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:25:18.907817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:18.907858Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:25:18.907909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:25:18.907975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:25:18.908017Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-02-19T08:25:18.908051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:25:18.908084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:25:18.908996Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:18.909078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:18.909112Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:18.909156Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:25:18.909195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:18.910181Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:18.910265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:18.910294Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:18.910323Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:25:18.910355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:25:18.910431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:25:18.911036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:18.911094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:18.911190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:25:18.911476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:18.911525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:18.911597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:18.913422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:18.914866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:18.914943Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:18.915026Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait 
txId: 101 2026-02-19T08:25:18.915303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:18.915350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:25:18.915740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:18.915829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:18.915866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:804:2793] TestWaitNotification: OK eventTxId 101 2026-02-19T08:25:18.916327Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:18.916503Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 187us result status StatusPathDoesNotExist 2026-02-19T08:25:18.916652Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:18.917188Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:18.917357Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 186us result status StatusSuccess 2026-02-19T08:25:18.917798Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:14.937602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:14.937682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:14.937763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:14.937800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:14.937834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:14.937872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:14.937920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:14.937952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:14.938016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:14.938090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:14.938976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:14.939275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:15.021440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:15.021490Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:15.034817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:15.035501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:15.035692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:15.047703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:15.048468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:15.049254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:15.049561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:15.052675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:15.052878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:15.054148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:15.054209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:15.054330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:15.054372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:15.054422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:15.054620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:15.185776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.186683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.186794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.186877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.186986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:15.187462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
entPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:25:18.813169Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 2, subscribers: 0 2026-02-19T08:25:18.813207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 1], 18 2026-02-19T08:25:18.813244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:25:18.813997Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:18.814078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:18.814114Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:25:18.814156Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2026-02-19T08:25:18.814200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:18.814951Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:18.815031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:18.815058Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:25:18.815085Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:25:18.815113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:25:18.815176Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-02-19T08:25:18.822139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-02-19T08:25:18.822260Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:18.822302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:18.823382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:25:18.823788Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-02-19T08:25:18.826238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:18.826560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 Forgetting tablet 72075186233409546 2026-02-19T08:25:18.828034Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2026-02-19T08:25:18.828585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:18.828756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:25:18.829545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:18.829589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:18.829693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:25:18.830227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:18.830264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:18.830320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:18.830925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:25:18.838222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:18.838683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:18.841093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:18.841150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:18.841226Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:18.841455Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-02-19T08:25:18.841867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-02-19T08:25:18.841914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-02-19T08:25:18.842590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-02-19T08:25:18.842700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:25:18.842743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:2599:4203] TestWaitNotification: OK eventTxId 108 2026-02-19T08:25:18.851518Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:18.851724Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 226us result status StatusPathDoesNotExist 2026-02-19T08:25:18.851875Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:18.852705Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:18.852865Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 181us result status StatusPathDoesNotExist 2026-02-19T08:25:18.853014Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpBatchUpdate::Large_3 [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D311D604100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:16.687582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:16.687680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.687749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:16.687813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:16.687852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:16.687896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:25:16.687927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:16.687952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:16.688015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.688086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:16.688961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:16.689284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:16.781574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:16.781633Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:16.793835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:16.794486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:16.794680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:16.806751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:16.807487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:16.808295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:16.808596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:16.811961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.812157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:16.813318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:16.813380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.813520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:16.813568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:16.813624Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:16.813848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:16.962533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.963504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.963608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.963673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.963794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.963857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.963966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.964036Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.964111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.964182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.964240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.964294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.964384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.964444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: E ... 
Indexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.189021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.189374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.189456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.189664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.189859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.189956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.190048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.190228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.190309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.190745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.191071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.191224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.191307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.191481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.191541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.191593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.191839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:19.196397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:19.200486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:19.200555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:19.200927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:19.200982Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:19.201028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:19.202138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 Leader for TabletID 72057594046678944 is [1:975:2933] sender: [1:1031:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 2026-02-19T08:25:19.254512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 2026-02-19T08:25:19.285757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 0 2026-02-19T08:25:19.285977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-02-19T08:25:19.286048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-02-19T08:25:19.286100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:497: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-02-19T08:25:19.286160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:507: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D311D611900, stats written 1 2026-02-19T08:25:19.286714Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:19.287019Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 289us result status StatusSuccess 2026-02-19T08:25:19.287563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 153 LastUpdateTime: 153 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 
PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } IsRestore: false } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:18.508483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:18.508573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:18.508638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:18.508689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:18.508740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:18.508775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2026-02-19T08:25:18.508817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:18.508843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:18.508908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:18.508980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:18.509859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:18.510131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:18.602066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:18.602138Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:18.613305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:18.614013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:18.614173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:18.624789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:18.625490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:18.626194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:18.626473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:18.629328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:18.629499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:18.630720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:18.630787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:18.630907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:18.630955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-02-19T08:25:18.631001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:18.631163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:18.767036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.767963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:25:18.768488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:18.768976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
d-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:20.191840Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:25:20.193385Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResults wait txId: 100 2026-02-19T08:25:20.194960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:25:20.195135Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.195199Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2026-02-19T08:25:20.196767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:20.196933Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2026-02-19T08:25:20.198487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2026-02-19T08:25:20.198680Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.198772Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2026-02-19T08:25:20.200349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:20.200527Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-02-19T08:25:20.200802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:25:20.200841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-02-19T08:25:20.200911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:20.200932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:25:20.201200Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:25:20.201286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:20.201318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [2:700:2689] 2026-02-19T08:25:20.201420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:20.201477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:20.201494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:700:2689] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-02-19T08:25:20.201769Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:20.201891Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 161us result status StatusPathDoesNotExist 
2026-02-19T08:25:20.202027Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:20.202358Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:20.202462Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 114us result status StatusPathDoesNotExist 2026-02-19T08:25:20.202553Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:20.202814Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:20.202911Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 115us result status StatusSuccess 2026-02-19T08:25:20.203175Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:16.521440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:16.521521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.521586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:16.521659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:16.521704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:16.521757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:16.521783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:16.521854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:16.521934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.522004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:16.522795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:16.523078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:16.611710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:16.611759Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:16.622493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:16.623454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:16.623630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:16.634278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:16.634980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:16.635619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:16.635871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:16.638888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.639055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:16.640056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:16.640117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.640259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:16.640305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:16.640344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:16.640485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:16.789569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.790583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.790720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.790813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.790936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.791693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.042217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:20.042273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:20.042552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:25:20.042866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:20.042957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:25:20.043038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:25:20.043687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.043758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:25:20.043862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.043939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-02-19T08:25:20.044017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 ProposedWaitParts -> Done 2026-02-19T08:25:20.044947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:20.045064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:20.045112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:20.045163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-02-19T08:25:20.045205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 
2026-02-19T08:25:20.046416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:20.046513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:20.046545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:20.046577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:25:20.046610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:25:20.046688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:25:20.050009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.050083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:25:20.050449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:25:20.050689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:20.050751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:20.050792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:20.050826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:20.050888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:25:20.050965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:811:2776] message: TxId: 103 2026-02-19T08:25:20.051035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:20.051094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:25:20.051138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:25:20.051268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 39] was 2 2026-02-19T08:25:20.051784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:20.051825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:20.053097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:20.053456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:20.055050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:20.055107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-02-19T08:25:20.055233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:25:20.055284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1135:3067] 2026-02-19T08:25:20.056131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-02-19T08:25:20.057836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:20.058062Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 275us result status StatusSuccess 2026-02-19T08:25:20.058647Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } 
PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D9E8A304100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:16.007237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:16.007319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.007382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:16.007419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:16.007452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:16.007507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:16.007534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:16.007593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: 
ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:16.007665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.007735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:16.008527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:16.008808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:16.088126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:16.088171Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:16.097902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:16.102356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:16.102568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:16.110770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:16.111362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:16.111904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:16.112105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:16.115363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.115545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:16.116701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:16.116765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.116877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:16.116937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:16.117007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:16.117173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-02-19T08:25:16.226094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.226953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.227883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: E ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 Leader for TabletID 72057594046678944 is [1:892:2839] sender: [1:967:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 2026-02-19T08:25:20.130274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 2026-02-19T08:25:20.130383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 0 2026-02-19T08:25:20.130476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-02-19T08:25:20.130555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:20.130725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-02-19T08:25:20.130783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-02-19T08:25:20.130842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:20.130951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D9E8A310100, stats written 2 2026-02-19T08:25:20.131499Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:20.134203Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 217us result status StatusSuccess 2026-02-19T08:25:20.134713Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165424 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:19.066092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:19.066165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:19.066236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:19.066277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:19.066311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:19.066342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:19.066382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:19.066423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:19.066485Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:19.066562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:19.067321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:19.067588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:19.150814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:19.150861Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:19.161750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:19.162320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:19.162482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:19.172370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:19.172952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:19.173542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:19.173812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:19.176401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:19.176555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:19.177570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:19.177633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:19.177758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:19.177842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:19.177883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:19.178029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.314135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.315942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
nged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1009 RawX2: 4294970243 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2026-02-19T08:25:20.444200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2026-02-19T08:25:20.444298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1009 RawX2: 4294970243 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2026-02-19T08:25:20.444343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:25:20.444426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1009 RawX2: 4294970243 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2026-02-19T08:25:20.444480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:20.444521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.444564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-02-19T08:25:20.444602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 107:0 ProposedWaitParts -> Done 2026-02-19T08:25:20.448378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:25:20.448481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:25:20.448561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2026-02-19T08:25:20.451239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:25:20.451361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:25:20.451457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2026-02-19T08:25:20.451856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2026-02-19T08:25:20.451903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: 
[72057594046678944] TDone opId# 107:2 ProgressState 2026-02-19T08:25:20.452012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 2/3 2026-02-19T08:25:20.452046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2026-02-19T08:25:20.452084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 2/3 2026-02-19T08:25:20.452130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2026-02-19T08:25:20.452171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2026-02-19T08:25:20.452391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.452547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.452780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.452816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 107:0 ProgressState 2026-02-19T08:25:20.452877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 3/3 2026-02-19T08:25:20.452899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-02-19T08:25:20.452925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 3/3 2026-02-19T08:25:20.452950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-02-19T08:25:20.452976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2026-02-19T08:25:20.453041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:887:2833] message: TxId: 107 2026-02-19T08:25:20.453089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-02-19T08:25:20.453133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-02-19T08:25:20.453164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:0 2026-02-19T08:25:20.453283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-02-19T08:25:20.453328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2026-02-19T08:25:20.453346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:1 2026-02-19T08:25:20.453373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-02-19T08:25:20.453394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2026-02-19T08:25:20.453416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:2 2026-02-19T08:25:20.453453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-02-19T08:25:20.455729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:25:20.455786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:941:2887] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-02-19T08:25:20.458937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:25:20.459382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:105: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 38] domain path: /MyRoot/USER_0 shardsToCreate: 6 shardsPerPath: 1 GetShardsInside: 4 MaxShards: 7 2026-02-19T08:25:20.459473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2026-02-19T08:25:20.459514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2026-02-19T08:25:20.461473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:20.461737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has 
reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-02-19T08:25:20.462153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-02-19T08:25:20.462194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-02-19T08:25:20.462722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-02-19T08:25:20.462847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:25:20.462885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:1135:3052] TestWaitNotification: OK eventTxId 108 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_3 [GOOD] Test command err: 2026-02-19T08:24:30.641292Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489341333350719:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:30.642088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:30.669060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:30.922734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:30.922842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:30.973788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:30.976124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:30.999306Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:31.025890Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489341333350678:2081] 1771489470608626 != 1771489470608629 2026-02-19T08:24:31.094469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:31.094494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:31.094510Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:31.094619Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:31.222372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:33.734216Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006b9/r3tmp/spilling-tmp-runner/node_1_42f4b7e3-184f3d24-a4654f80-aec182ab, actor: [1:7608489354218254400:2385] 2026-02-19T08:24:33.734403Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006b9/r3tmp/spilling-tmp-runner 2026-02-19T08:24:35.900190Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900247Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900279Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900304Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900331Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900369Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900406Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900439Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900472Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900508Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900544Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:35.900601Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg0dwe5vfec4p7x3dw0dyq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NWVkZGUxNWMtOTMwNGVkNWMtMTMxNzYzZGQtNTI3NWM1ODY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:35.900792Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489362808190076:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg0dwe5vfec4p7x3dw0dyq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NWVkZGUxNWMtOTMwNGVkNWMtMTMxNzYzZGQtNTI3NWM1ODY=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:35.900829Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489362808190076:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg0dwe5vfec4p7x3dw0dyq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NWVkZGUxNWMtOTMwNGVkNWMtMTMxNzYzZGQtNTI3NWM1ODY=, PoolId: default, IsStreamingQuery: 0}. 
Got request, become WaitResolveState trace_id# 2026-02-19T08:24:35.901163Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:35.901329Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901390Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901438Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901500Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901560Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901592Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901620Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901663Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901700Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:35.901758Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [Ow ... :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489549260137475:2868], CA [2:7608489549260137477:2870], CA [2:7608489549260137474:2867], CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.073041Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. 
Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:7608489549260137475:2868], CA [2:7608489549260137477:2870], CA [2:7608489549260137474:2867], CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.073167Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489549260137477:2870] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 87640 DurationUs: 269000 Tasks { TaskId: 4 CpuTimeUs: 53745 FinishTimeMs: 1771489519071 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 50000 ReadBytes: 400000 AffectedPartitions: 2 } IngressRows: 50000 ComputeCpuTimeUs: 53688 BuildCpuTimeUs: 57 WaitOutputTimeUs: 69614 WaitInputTimeUs: 26745 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489518802 CreateTimeMs: 1771489518725 UpdateTimeMs: 1771489519071 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:25:19.073188Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489549260137477:2870] 2026-02-19T08:25:19.073216Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489549260137475:2868], CA [2:7608489549260137474:2867], CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.073238Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:7608489549260137475:2868], CA [2:7608489549260137474:2867], CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.074019Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489549260137475:2868] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 94590 DurationUs: 289000 Tasks { TaskId: 2 CpuTimeUs: 54924 FinishTimeMs: 1771489519073 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 50000 ReadBytes: 400000 AffectedPartitions: 2 } IngressRows: 50000 ComputeCpuTimeUs: 54306 BuildCpuTimeUs: 618 WaitOutputTimeUs: 45708 WaitInputTimeUs: 26861 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489518784 CreateTimeMs: 1771489518724 UpdateTimeMs: 1771489519073 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:25:19.074058Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489549260137475:2868] 2026-02-19T08:25:19.074092Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489549260137474:2867], CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.074117Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489549260137474:2867], CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.082271Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489549260137478:2871] TaskId# 5 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 30534 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 384 FinishTimeMs: 1771489519081 InputBytes: 4 ComputeCpuTimeUs: 285 BuildCpuTimeUs: 99 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489519081 CreateTimeMs: 1771489518725 UpdateTimeMs: 1771489519081 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:19.082332Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489549260137474:2867], CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.082363Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489549260137474:2867], CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.082484Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489549260137474:2867] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 98366 DurationUs: 267000 Tasks { TaskId: 1 CpuTimeUs: 60143 FinishTimeMs: 1771489519081 OutputBytes: 1 Tables { TablePath: "/Root/LargeTable" ReadRows: 48000 ReadBytes: 384000 AffectedPartitions: 2 } IngressRows: 48000 ComputeCpuTimeUs: 59380 BuildCpuTimeUs: 763 WaitOutputTimeUs: 78064 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489518814 CreateTimeMs: 1771489518723 UpdateTimeMs: 1771489519081 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:25:19.082532Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489549260137474:2867] 2026-02-19T08:25:19.082560Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.082586Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7608489549260137478:2871], trace_id# 2026-02-19T08:25:19.082678Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489549260137478:2871] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 51483 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 385 FinishTimeMs: 1771489519081 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 286 BuildCpuTimeUs: 99 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489519081 CreateTimeMs: 1771489518725 UpdateTimeMs: 1771489519081 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:25:19.082698Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [2:7608489549260137478:2871] 2026-02-19T08:25:19.082811Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:25:19.082841Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:25:19.082869Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489549260137467:2536] TxId: 281474976710758. Ctx: { TraceId: 01khtg1rpg8znkw5jxwe6ft4d1, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OGVlNDZhNzgtY2QyNzkzYzQtNTc0MDBmOTgtYzg4MmY2MDA=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.278413s ReadRows: 198000 ReadBytes: 1584000 RequestUnits# 198000 ForceFlag# true trace_id# >> IncrementalBackup::VerifyIncrementalBackupTableAttributes [GOOD] >> IncrementalBackup::ResetVsUpsertMissingColumnsTest >> TSchemeShardSubDomainTest::RedefineErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DDA92D04100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:05.899692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:05.899785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:05.899861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:05.899892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:05.899920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:05.899951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:05.899974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:25:05.900013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:05.900075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:05.900134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:05.900824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:05.901104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:05.967885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:05.967929Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:05.976502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:05.977504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:05.977663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:05.985813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:05.987628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:05.988301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:05.988606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:05.994805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:05.994993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:05.996277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:05.996341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:05.996450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:05.996493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:05.996534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:05.996709Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:06.131953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.132808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.132912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.132969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:06.133626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETo ... 
schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 2026-02-19T08:25:20.827347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 2026-02-19T08:25:20.848637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 1 2026-02-19T08:25:20.848787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2026-02-19T08:25:20.848842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:20.848998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want 
to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 2026-02-19T08:25:20.861541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST 
waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 2026-02-19T08:25:20.872508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 2 2026-02-19T08:25:20.872647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2026-02-19T08:25:20.872702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:20.872891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DDA92D28100, stats written 3 2026-02-19T08:25:20.873568Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:20.875816Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 268us result status StatusSuccess 2026-02-19T08:25:20.876449Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DE400204100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:13.992705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:13.992788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:13.992864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:13.992912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:13.992947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:13.992997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:13.993048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:13.993083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: 
ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:13.993148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:13.993224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:13.994107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:13.994409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:14.081699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:14.081765Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:14.091718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:14.092217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:14.092381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:14.102743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:14.103474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:14.104096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:14.104344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:14.107050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:14.107264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:14.108299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:14.108350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:14.108447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:14.108496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:14.108548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:14.108730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-02-19T08:25:14.250694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.251680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.251783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.251849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.251977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:14.252676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETo ... 
900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats 
written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 Leader for TabletID 72057594046678944 is [2:972:2931] sender: [2:1032:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 2026-02-19T08:25:21.182509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 2026-02-19T08:25:21.236580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 0 2026-02-19T08:25:21.236749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-02-19T08:25:21.236800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-02-19T08:25:21.236844Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:497: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-02-19T08:25:21.236894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:507: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DE4001FF900, stats written 1 2026-02-19T08:25:21.237302Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:21.237494Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 220us result status StatusSuccess 2026-02-19T08:25:21.237938Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 152 LastUpdateTime: 152 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } IsRestore: false } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:18.972742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:18.972830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:18.972883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:18.972913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:18.972945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:18.972977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:18.973017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:18.973039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:18.973081Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:18.973136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:18.973797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:18.974050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:19.048208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:19.048261Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:19.057627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:19.058511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:19.058689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:19.071465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:19.072295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:19.073061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:19.073352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:19.076424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:19.076556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:19.077685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:19.077790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:19.077914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:19.077972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:19.078014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:19.078183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.210406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.211376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.211536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.211652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.211790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.211879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.211940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.212023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.212109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.212184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.212247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.212317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.212408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.212498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:21.407328Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 197us result status StatusSuccess 2026-02-19T08:25:21.407740Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:21.408172Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:21.408342Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 169us result status 
StatusSuccess 2026-02-19T08:25:21.408676Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:21.409055Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:21.409174Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 128us result status StatusSuccess 2026-02-19T08:25:21.409452Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 
CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 250 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:21.409892Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:21.410039Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 179us result status StatusSuccess 2026-02-19T08:25:21.410343Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 250 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:20.141881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:20.141967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:20.142028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:20.142074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:20.142122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:20.142157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:20.142202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-02-19T08:25:20.142231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:20.142292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:20.142382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:20.143208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:20.143516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:20.232070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:20.232125Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:20.244127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:20.244826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:20.245019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:20.256257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:20.256962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:20.257683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:20.258014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:20.261242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:20.261445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:20.262737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:20.262806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:20.262969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:20.263023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:20.263070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-02-19T08:25:20.263256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:20.414271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.415981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.416057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.416115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.416207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:20.416271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ecute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:25:22.312325Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:117: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:25:22.312488Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:25:22.314618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 106, at schemeshard: 72057594046678944 2026-02-19T08:25:22.314858Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2026-02-19T08:25:22.315428Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:22.315607Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 198us result status StatusSuccess 2026-02-19T08:25:22.316017Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:22.316577Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:22.316747Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 190us result status StatusSuccess 2026-02-19T08:25:22.317175Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:22.317977Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:22.318128Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 175us result status StatusSuccess 2026-02-19T08:25:22.318422Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:22.319059Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:22.319216Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 185us result status StatusSuccess 2026-02-19T08:25:22.319584Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 41 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 41 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:22.224506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:22.224590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:22.224671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:22.224716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:22.224749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing 
config: using default configuration 2026-02-19T08:25:22.224785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:22.224817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:22.224859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:22.224930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:22.225012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:22.225861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:22.226167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:22.317153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:22.317211Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:22.329392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:22.330018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:22.330214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:22.341849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:22.342672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:22.343411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:22.343738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:22.346955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:22.347166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:22.348504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:22.348572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:22.348698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:22.348744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:22.348796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:22.348993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:22.510219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.511203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.511344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.511465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.511596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.511667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.511735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.511824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.511920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.512006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.512087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.512150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.512223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.512323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
LAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 108:0 CreateParts -> ConfigureParts 2026-02-19T08:25:23.278246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.278304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:25:23.278354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 108:0 ConfigureParts -> Propose 2026-02-19T08:25:23.279993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.280045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.280090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2026-02-19T08:25:23.280135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2026-02-19T08:25:23.280243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:25:23.281626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2026-02-19T08:25:23.281757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000043 2026-02-19T08:25:23.282039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:23.282147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:23.282189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-02-19T08:25:23.282468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 108:0 Propose -> Done 2026-02-19T08:25:23.282519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-02-19T08:25:23.282698Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:23.282769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2026-02-19T08:25:23.284374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:23.284421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:23.284572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.284613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 108, path id: 38 2026-02-19T08:25:23.284946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.284991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 108:0 ProgressState 2026-02-19T08:25:23.285072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:25:23.285102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:25:23.285151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:25:23.285187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:25:23.285222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2026-02-19T08:25:23.285258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:25:23.285291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-02-19T08:25:23.285320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 108:0 2026-02-19T08:25:23.285401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:25:23.285439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2026-02-19T08:25:23.285471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 38], 8 2026-02-19T08:25:23.285909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:23.286018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:23.286072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:25:23.286107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-02-19T08:25:23.286143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:23.286229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-02-19T08:25:23.288587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-02-19T08:25:23.288886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-02-19T08:25:23.288929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-02-19T08:25:23.289456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-02-19T08:25:23.289528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:25:23.289563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:1002:2956] TestWaitNotification: OK eventTxId 108 2026-02-19T08:25:23.290189Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:23.290362Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 187us result status StatusSuccess 2026-02-19T08:25:23.290805Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> IncrementalBackup::IncrementalBackupWithCoveringIndex [GOOD] >> IncrementalBackup::IncrementalBackupMultipleIndexes >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:23.419152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:23.419229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:23.419290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:23.419326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2026-02-19T08:25:23.419375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:23.419414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:23.419440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:23.419485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:23.419544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:23.419616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:23.420378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:23.420677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:23.504589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:23.504644Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:23.513828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:23.514419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:23.514574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:23.524404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:23.525007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:23.525559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:23.525802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:23.528175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.528327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:23.529135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:23.529187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.529299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-02-19T08:25:23.529343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:23.529377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:23.529505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.672286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.673328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.673466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.673562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.673696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.673809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.673892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.673986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.674073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.674149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.674211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.674271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.674344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.674432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... plete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:24.216431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:25:24.216908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:24.216935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:24.217049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:24.217108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.217130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:25:24.217179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:25:24.217400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:24.217432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:25:24.217510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:24.217535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:24.217570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:24.217604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:24.217663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:25:24.217724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:24.217758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:25:24.217791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:25:24.217860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:24.217905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in 
progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:25:24.217936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:25:24.217966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:25:24.218466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:24.218537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:24.218562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:24.218597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:25:24.218648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:24.219178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:24.219259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:24.219294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:24.219319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:25:24.219344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:24.219410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:25:24.222342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:24.222594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2026-02-19T08:25:24.225000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:25:24.225190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2026-02-19T08:25:24.225264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2026-02-19T08:25:24.225434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2026-02-19T08:25:24.225542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2026-02-19T08:25:24.227505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:24.227660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-02-19T08:25:24.227852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:24.227879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-02-19T08:25:24.227940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:25:24.227962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:25:24.228317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:24.228401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:24.228489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:709:2697] 2026-02-19T08:25:24.228705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:25:24.228782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:24.228807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:709:2697] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:23.672648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:23.672744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:23.672816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:23.672850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:23.672881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: 
OperationsProcessing config: using default configuration 2026-02-19T08:25:23.672915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:23.672942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:23.672987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:23.673066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:23.673136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:23.673921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:23.674239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:23.761918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:23.761968Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:23.773535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:23.774217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:23.774393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:23.785150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:23.785857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:23.786560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:23.786852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:23.789829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.790021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:23.791171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:23.791235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.791369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:23.791414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:23.791452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:23.791632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.941560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.942542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.942660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.942738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.942890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.942973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.943033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.943109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.943204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.943277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.943339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.943396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.943475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.943560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
TabletID: 72075186233409550 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2026-02-19T08:25:24.800409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2026-02-19T08:25:24.800753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:25:24.802243Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-02-19T08:25:24.802383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:24.802564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 Forgetting tablet 72075186233409546 2026-02-19T08:25:24.804291Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-02-19T08:25:24.804639Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-02-19T08:25:24.805393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:24.805548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:25:24.806472Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-02-19T08:25:24.806929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:24.807100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409548 2026-02-19T08:25:24.807791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:25:24.807908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-02-19T08:25:24.809350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:25:24.809512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:24.809555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:24.809615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:24.810352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:24.810421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:24.810515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:24.812050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:25:24.812092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-02-19T08:25:24.812175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:24.812197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:24.814291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:24.814328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:24.814410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:24.814462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:24.814533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:24.814566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:25:24.814634Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:24.814820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:24.814907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:24.814951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:24.815031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:24.816452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-02-19T08:25:24.816687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-02-19T08:25:24.816720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-02-19T08:25:24.817160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:25:24.817225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:25:24.817249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1178:3082] TestWaitNotification: OK eventTxId 106 2026-02-19T08:25:24.817733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:24.817920Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 171us result status StatusSuccess 2026-02-19T08:25:24.818222Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:22.793014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:22.793104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:22.793180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:22.793218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:22.793255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:22.793300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:22.793353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:22.793395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:22.793465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:22.793551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:22.794478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:22.794829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:22.879783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:22.879835Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:22.890394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:22.891111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:22.891291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:22.901296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:22.902050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:22.902731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:22.903004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:22.905742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:22.905931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:22.906997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:22.907056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:22.907146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:22.907194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:22.907228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:22.907441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.049203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.050264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.050443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.050539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.050661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.050730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.050811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.050902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.050986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.051072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.051138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.051200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.051293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.051371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
EMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:24.955738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:24.955761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-02-19T08:25:24.956164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:25:24.956884Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-02-19T08:25:24.957010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:24.957239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:25:24.958295Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-02-19T08:25:24.958763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:24.961935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 Forgetting tablet 72075186233409546 2026-02-19T08:25:24.963839Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-02-19T08:25:24.963992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:24.964219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409548 2026-02-19T08:25:24.965252Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-02-19T08:25:24.965384Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:25:24.965564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 Forgetting tablet 72075186233409547 2026-02-19T08:25:24.968765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:24.968820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:24.968902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 Forgetting tablet 72075186233409549 2026-02-19T08:25:24.970547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:24.970614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:24.970750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:24.972298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:25:24.972482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:25:24.972971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:24.973032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:24.973122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:24.973165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:24.976222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:24.976282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:24.976387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:24.976434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:25:24.976682Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:24.976785Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:24.976856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:24.976902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:24.976993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:24.979918Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-02-19T08:25:24.982526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-02-19T08:25:24.982600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-02-19T08:25:24.983061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-02-19T08:25:24.983164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:25:24.983208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1063:3015] TestWaitNotification: OK eventTxId 105 2026-02-19T08:25:24.983846Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:24.984045Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 252us result status StatusPathDoesNotExist 2026-02-19T08:25:24.984245Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-02-19T08:25:24.984880Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:24.985043Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 174us result status StatusPathDoesNotExist 2026-02-19T08:25:24.985181Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:24.385960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:24.386030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:24.386085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:24.386132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:24.386163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:24.386208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:24.386236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:24.386280Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:24.386335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:24.386413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:24.387159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:24.387415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:24.454737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:24.454783Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:24.464416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:24.465029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:24.465180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:24.474718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:24.475430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:24.476050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:24.476281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:24.478827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.478976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:24.479724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:24.479770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.479887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:24.479931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:24.479963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:24.480071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:24.587120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.587942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.588582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
rdinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:25.063796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-02-19T08:25:25.063983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 Propose -> Done 2026-02-19T08:25:25.064022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-02-19T08:25:25.064119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:25.064155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:25.064195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:25:25.065684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:25.065737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:25.065861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:25.065936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:25.065973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:25:25.066028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:25:25.066077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:25.066106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:25:25.066200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:25.066231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:25.066263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:25.066290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:25.066325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:25:25.066359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:25.066390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:25:25.066438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:25:25.066504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:25.066536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:25:25.066566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:25:25.066592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:25:25.067412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:25.067495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:25.067537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:25.067575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:25:25.067619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:25.068632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:25.068706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:25.068750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:25.068778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 
72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:25:25.068807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:25.068877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:25:25.070736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:25.071471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2026-02-19T08:25:25.073339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:25:25.073488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:25:25.073659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2026-02-19T08:25:25.075342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2026-02-19T08:25:25.075495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-02-19T08:25:25.075702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:25.075729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-02-19T08:25:25.075794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:25:25.075809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:25:25.076154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:25.076227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:25.076248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:723:2711] 2026-02-19T08:25:25.076346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:25:25.076401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:25.076414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:723:2711] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> IncrementalBackup::CdcVersionSync [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithDataVerification >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] >> TxUsage::WriteToTopic_Demo_45_Table [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:21.170123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:21.170187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:21.170234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: 
ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:21.170271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:21.170301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:21.170338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:21.170368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:21.170402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:21.170462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:21.170545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:21.171227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:21.171464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:21.253636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:21.253693Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:21.267140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:21.267830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:21.267995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:21.281012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:21.281904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:21.282693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:21.282992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:21.286725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:21.286936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:21.288296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:21.288374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:21.288508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:21.288582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:21.288655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:21.288838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:21.429493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.430520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.430645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.430736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.430889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.430953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.431047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.431142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.431229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.431311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.431375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.431437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:21.431505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:25:21.431595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... ationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:25.802970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-02-19T08:25:25.803017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-02-19T08:25:25.803189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-02-19T08:25:25.803333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-02-19T08:25:25.803378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-02-19T08:25:25.803431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-02-19T08:25:25.803744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:25.803831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-02-19T08:25:25.803909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:25.803946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-02-19T08:25:25.803989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 ProposedWaitParts -> Done 2026-02-19T08:25:25.804825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:25.804918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:25.804952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-02-19T08:25:25.804999Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2026-02-19T08:25:25.805045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-02-19T08:25:25.805875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:25.805950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:25.805973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-02-19T08:25:25.805998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-02-19T08:25:25.806025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-02-19T08:25:25.806089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-02-19T08:25:25.808675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:25.808726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-02-19T08:25:25.809079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-02-19T08:25:25.809285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:25.809335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:25.809377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:25.809408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:25.809443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-02-19T08:25:25.809496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:991:2929] message: TxId: 104 2026-02-19T08:25:25.809533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:25.809568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:25:25.809597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:25:25.809689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-02-19T08:25:25.810659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-02-19T08:25:25.810694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-02-19T08:25:25.811458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-02-19T08:25:25.811637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-02-19T08:25:25.812584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-02-19T08:25:25.812637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-02-19T08:25:25.812708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:25:25.812741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1342:3257] 2026-02-19T08:25:25.813378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-02-19T08:25:25.814267Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-02-19T08:25:25.814467Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 199us result status StatusSuccess 2026-02-19T08:25:25.815006Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:24.484074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:24.484157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:24.484256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:24.484301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:24.484338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:24.484380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:24.484410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:24.484452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:24.484536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:24.484632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:24.485467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:24.485813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:24.573952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:24.574010Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:24.585082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:24.585732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:24.585949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:24.596614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:24.597282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:24.598070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:24.598375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:24.601391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.601590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:24.602785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:24.602866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.602988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:24.603043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:24.603105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:24.603267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:24.739925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.740673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.740763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.740817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.740907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.740961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.741011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.741061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.741129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.741181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.741228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.741273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.741361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.741426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
tionId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2026-02-19T08:25:25.836856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:595: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2026-02-19T08:25:25.836880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:600: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-02-19T08:25:25.836903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:706: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2026-02-19T08:25:25.852043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-02-19T08:25:25.852336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-02-19T08:25:25.901484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2026-02-19T08:25:25.901623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000039 2026-02-19T08:25:25.901724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:576: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000039 2026-02-19T08:25:25.901776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:228: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:25.901817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:581: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2026-02-19T08:25:25.901854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:706: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-02-19T08:25:25.902516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2026-02-19T08:25:25.902616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000039 2026-02-19T08:25:25.902670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:576: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: 
COMPLETE TxId: 104 Step: 5000039 2026-02-19T08:25:25.902705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:228: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:25.902731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:581: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-02-19T08:25:25.902891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:25:25.903038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:25.903098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:25:25.906238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:25:25.906383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:25:25.906556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:25.906589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:25.906726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:25:25.906878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:25.906908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:743:2716], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-02-19T08:25:25.906939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:743:2716], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-02-19T08:25:25.907356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:25:25.907399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:25:25.907512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:25.907546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:25.907583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: 
Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:25.907614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:25.907646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-02-19T08:25:25.907681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:25.907716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:25:25.907751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:25:25.907886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-02-19T08:25:25.907927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-02-19T08:25:25.907957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:25:25.907983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-02-19T08:25:25.908692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:25:25.908775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:25:25.908810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:25:25.908853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:25:25.908894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:25:25.909761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:25:25.909835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:25:25.909861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:25:25.909885Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:25:25.909913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:25:25.909973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-02-19T08:25:25.914295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:25:25.916085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] >> TxUsage::WriteToTopic_Demo_45_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:24.089454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:24.089525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:24.089580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:24.089613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:24.089643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:24.089669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:24.089703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:24.089754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:24.089801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-02-19T08:25:24.089867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:24.090511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:24.090736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:24.152285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:24.152334Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:24.160715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:24.161354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:24.161491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:24.170108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:24.170861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:24.171551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:24.171843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:24.175041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.175241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:24.176400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:24.176469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.176584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:24.176629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:24.176675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:24.176844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:24.288224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.288935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.289635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
blishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:25:26.144226Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:25:26.144245Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-02-19T08:25:26.144264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 8 2026-02-19T08:25:26.144309Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:25:26.146721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:25:26.147662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2026-02-19T08:25:26.147852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:25:26.147885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-02-19T08:25:26.147952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:26.147972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-02-19T08:25:26.148014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:25:26.148026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:25:26.148452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:25:26.148554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:26.148578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [2:1332:3176] 2026-02-19T08:25:26.148704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:26.148775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:25:26.148816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:26.148831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:1332:3176] 2026-02-19T08:25:26.148883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:26.148896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1332:3176] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-02-19T08:25:26.149263Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:26.149424Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 217us result status StatusSuccess 2026-02-19T08:25:26.149743Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 39 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:26.150106Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:26.150227Z node 2 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 141us result status StatusSuccess 2026-02-19T08:25:26.150493Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 40 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 40 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:26.150819Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:26.150934Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 120us result status StatusSuccess 2026-02-19T08:25:26.151153Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D1D2D304100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:22.288273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:22.288360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:22.288423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:22.288463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:22.288513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:22.288569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:22.288606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:22.288659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, 
BatchMaxTime 0.100000s 2026-02-19T08:25:22.288722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:22.288799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:22.289638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:22.289952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:22.375904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:22.375963Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:22.387633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:22.388270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:22.388502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:22.399896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:22.400589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:22.401360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:22.401655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:22.405625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:22.405848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:22.406999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:22.407063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:22.407178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:22.407224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:22.407264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:22.407457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:22.545584Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.549940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.550003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.550062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.550127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:22.550208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETo ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 Leader for TabletID 72057594046678944 is [1:889:2836] sender: [1:964:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 2026-02-19T08:25:26.369090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 2026-02-19T08:25:26.369265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 0 2026-02-19T08:25:26.369350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-02-19T08:25:26.369421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:26.369528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 1 2026-02-19T08:25:26.369741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-02-19T08:25:26.369775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:26.369835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D1D2D310100, stats written 2 2026-02-19T08:25:26.370233Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:26.370442Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 232us result status StatusSuccess 2026-02-19T08:25:26.370903Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165424 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeleteAndRestart >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::RmDir >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:39.274064Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489380156126168:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:39.274154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:39.510441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:39.510548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:39.571995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:39.581697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:39.625399Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:39.629774Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489380156126138:2081] 
1771489479272026 != 1771489479272029 2026-02-19T08:24:39.691034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:39.691055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:39.691061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:39.691144Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:39.742693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:40.101529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:40.102079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:40.104569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:40.105260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:40.107182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489480152, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:40.108190Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489380156126675:2250] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:40.108267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:40.108329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-02-19T08:24:40.108384Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489380156126106:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.108512Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489380156126112:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.108521Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489380156126109:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.108699Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489380156126604:2210][/Root] Path was updated to new version: owner# [1:7608489380156126431:2120], state# { Deleted: 0 
Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.108710Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489380156126675:2250] Ack update: ack to# [1:7608489380156126498:2148], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:40.108862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-02-19T08:24:40.109445Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489380156126890:2292][/Root] Path was updated to new version: owner# [1:7608489380156126884:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.109650Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489380156126891:2293][/Root] Path was updated to new version: owner# [1:7608489380156126885:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.280650Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489384451094266:2486][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489380156126431:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.285119Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:42.241327Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:42.243581Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489393041028872:2491][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489380156126431:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:42.256210Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fd5/r3tmp/spilling-tmp-runner/node_1_940b2dc7-aec2d3a-c7ddfd43-1fdfcf45, actor: [1:7608489393041028880:2311] 2026-02-19T08:24:42.256449Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fd5/r3tmp/spilling-tmp-runner 2026-02-19T08:24:42.257483Z node 1 
:KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0kdw9nr2v3mmcr63dfhj", Request has 18444972584227.294154s seconds to be completed 2026-02-19T08:24:42.261385Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0kdw9nr2v3mmcr63dfhj", Created new session, sessionId: ydb://session/3?node_id=1&id=M2ZiNzQ4ZWQtZjM1ZmZkMTctMjM4NWY5Y2YtM2M4MDNlY2Y=, workerId: [1:7608489393041028895:2325], database: /Root, longSession: 1, local sessions count: 1 2026-02-19T08:24:42.261644Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtg0kdw9nr2v3mmcr63dfhj 2026-02-19T08:24:42.261705Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:42.261755Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:24:42.261780Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:42.262419Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489393041028898:2494][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489380156126431:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:42.262497Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489393041028897:2493][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489380156126431:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:42.289021Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0nhgatzzar7hnpzfr9cc, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZiNzQ4ZWQtZjM1ZmZkMTctMjM4NWY5Y2YtM2M4MDNlY2Y=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489393041028895:2325] 2026-02-19T08:24:42.289066Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489393041028913:2496] 2026-02-19T08:24:42.290036Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489393041028915:2497][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489380156126431:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:42.291166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489393041028914:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:42.291293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:42.291586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489393041028926:2330], DatabaseId: /Root, PoolId: default, Failed to fet ... e { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-02-19T08:25:26.315412Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489582263597729:2336] TxId: 281474976715662. Ctx: { TraceId: 01khtg20709ky385xma6y638pd, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzI5YTMwNzYtODY0MTUxOWEtOGVkNDExYmQtYTlmYmRkNzA=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:26.321066Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01khtg20709ky385xma6y638pd, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzI5YTMwNzYtODY0MTUxOWEtOGVkNDExYmQtYTlmYmRkNzA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489582263597735:2352] 2026-02-19T08:25:26.321176Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01khtg20709ky385xma6y638pd, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzI5YTMwNzYtODY0MTUxOWEtOGVkNDExYmQtYTlmYmRkNzA=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7608489582263597734:2351] 2026-02-19T08:25:26.321760Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976715662, SelfId: [9:7608489582263597732:2350]. Received TEvPoison from: [9:7608489582263597729:2336] 2026-02-19T08:25:26.322027Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg20709ky385xma6y638pd", Forwarded response to sender actor, requestId: 4, sender: [9:7608489577968630332:2335], selfId: [9:7608489560788760445:2270], source: [9:7608489577968630333:2336] 2026-02-19T08:25:26.322521Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=MzI5YTMwNzYtODY0MTUxOWEtOGVkNDExYmQtYTlmYmRkNzA=, workerId: [9:7608489577968630333:2336], local sessions count: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:38.823747Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489374830501439:2228];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:38.823812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:38.831517Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:38.894001Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.011965s 2026-02-19T08:24:39.239846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:39.239958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:39.285133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:39.291658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:39.340746Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489374830501238:2081] 1771489478759128 != 1771489478759131 2026-02-19T08:24:39.351192Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:39.403941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:39.403963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:39.403971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:39.404047Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:39.487134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:39.797995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:39.798378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:39.800004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:39.800743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:39.803093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489479851, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:39.804049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-02-19T08:24:39.804096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2026-02-19T08:24:39.804279Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489379125469066:2252] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-02-19T08:24:39.804585Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489374830501206:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.804714Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489374830501209:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.804766Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489374830501212:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.804985Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489379125469001:2224][/Root] Path was updated to new version: owner# [1:7608489374830501518:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.805280Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489379125469232:2292][/Root] Path was updated to new version: owner# [1:7608489379125469218:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 
AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.805525Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489379125469066:2252] Ack update: ack to# [1:7608489374830501584:2146], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:39.805550Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489379125469239:2293][/Root] Path was updated to new version: owner# [1:7608489379125469219:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.805727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2026-02-19T08:24:39.823369Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489379125469360:2487][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489374830501518:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.828588Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:41.637799Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:41.639946Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fd9/r3tmp/spilling-tmp-runner/node_1_55e4977a-3674be76-1366ffbc-73574af9, actor: [1:7608489387715403965:2309] 2026-02-19T08:24:41.640151Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fd9/r3tmp/spilling-tmp-runner 2026-02-19T08:24:41.640477Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0k4fcm41dq6vjm3pztvq", Request has 18444972584227.911161s seconds to be completed 2026-02-19T08:24:41.644337Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0k4fcm41dq6vjm3pztvq", Created new session, sessionId: ydb://session/3?node_id=1&id=YmFiNjdmODEtMWYyMDE2MjItNWNmMmUxMzEtMjllZTllNzc=, workerId: [1:7608489387715403982:2325], database: /Root, longSession: 1, local sessions count: 1 2026-02-19T08:24:41.644577Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtg0k4fcm41dq6vjm3pztvq 2026-02-19T08:24:41.644655Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:41.644698Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 
2026-02-19T08:24:41.644729Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:41.646574Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489387715403985:2493][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489374830501518:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.646587Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489387715403984:2492][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489374830501518:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.646737Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489387715403986:2494][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489374830501518:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.684290Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0myk620jm4g9d3tjcexc, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmFiNjdmODEtMWYyMDE2MjItNWNmMmUxMzEtMjllZTllNzc=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489387715403982:2325] 2026-02-19T08:24:41.684329Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489387715404008:2497] 2026-02-19T08:24:41.685295Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489387715404010:2498][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489374830501518:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.686411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489387715404009:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:41.686530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to f ... port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-02-19T08:25:26.117192Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489579428057617:2336] TxId: 281474976710662. 
Ctx: { TraceId: 01khtg1zzf5dc22kqf9g3wswcv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MmMwYmEzNzMtNjk4NGQ5NC1lN2UwZTE5My1kZWI4ZjFhZA==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:26.121836Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg1zzf5dc22kqf9g3wswcv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MmMwYmEzNzMtNjk4NGQ5NC1lN2UwZTE5My1kZWI4ZjFhZA==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489579428057622:2351] 2026-02-19T08:25:26.121908Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01khtg1zzf5dc22kqf9g3wswcv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MmMwYmEzNzMtNjk4NGQ5NC1lN2UwZTE5My1kZWI4ZjFhZA==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489579428057623:2352] 2026-02-19T08:25:26.122038Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976710662, SelfId: [9:7608489579428057620:2350]. Received TEvPoison from: [9:7608489579428057617:2336] 2026-02-19T08:25:26.122622Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg1zzf5dc22kqf9g3wswcv", Forwarded response to sender actor, requestId: 4, sender: [9:7608489575133090219:2335], selfId: [9:7608489557953220349:2270], source: [9:7608489575133090220:2336] 2026-02-19T08:25:26.123129Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=MmMwYmEzNzMtNjk4NGQ5NC1lN2UwZTE5My1kZWI4ZjFhZA==, workerId: [9:7608489575133090220:2336], local sessions count: 0 >> AnalyzeDatashard::AnalyzeOneTable [GOOD] >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] >> TSchemeShardSubDomainTest::SetSchemeLimits >> TSchemeShardSubDomainTest::ForceDropTwice >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TSchemeShardSubDomainTest::Restart >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:38.848187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:38.848327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:38.959736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:38.959867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:38.965853Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489373226796162:2081] 1771489478436290 != 1771489478436293 2026-02-19T08:24:38.987108Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:38.990093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:39.149807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:39.222271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:39.222304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:39.222314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-02-19T08:24:39.222406Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:39.545051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:39.629163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:39.629616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:39.635884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:39.636686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:39.639251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489479683, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:39.640209Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489373226796699:2251] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:39.640266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:39.640298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-02-19T08:24:39.640401Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489373226796130:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.640439Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489373226796133:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.640512Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489373226796136:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.640720Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489373226796699:2251] Ack update: ack to# [1:7608489373226796520:2150], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:39.640731Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489373226796614:2206][/Root] Path was updated to new version: owner# [1:7608489373226796237:2104], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, 
LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.640787Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489377521764216:2292][/Root] Path was updated to new version: owner# [1:7608489377521764207:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.640854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-02-19T08:24:39.641055Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489377521764217:2293][/Root] Path was updated to new version: owner# [1:7608489377521764208:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.641693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:24:39.850804Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489377521764295:2490][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489373226796237:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.799286Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:41.801002Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fda/r3tmp/spilling-tmp-runner/node_1_18f7b1d5-1fe7cd00-9af52765-87c78535, actor: [1:7608489386111698899:2309] 2026-02-19T08:24:41.801211Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fda/r3tmp/spilling-tmp-runner 2026-02-19T08:24:41.801770Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0jz89547d262b0m7p2fe", Request has 18444972584227.749874s seconds to be completed 2026-02-19T08:24:41.805995Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0jz89547d262b0m7p2fe", Created new session, sessionId: ydb://session/3?node_id=1&id=ZWUzMjc0OWQtM2Y0ODk2NzItMTlkOTcwOWEtYTJiODUzNzQ=, workerId: [1:7608489386111698916:2325], database: /Root, longSession: 1, local sessions count: 1 2026-02-19T08:24:41.806261Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtg0jz89547d262b0m7p2fe 2026-02-19T08:24:41.806335Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:41.806379Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 
2026-02-19T08:24:41.806414Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:41.807163Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489386111698918:2495][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489373226796237:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.807300Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489386111698919:2496][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489373226796237:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.807486Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489386111698920:2497][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489373226796237:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.847382Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0n3p5djfkph7w2pt2vtw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZWUzMjc0OWQtM2Y0ODk2NzItMTlkOTcwOWEtYTJiODUzNzQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489386111698916:2325] 2026-02-19T08:24:41.847462Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489386111698942:2500] 2026-02-19T08:24:41.848558Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489386111698944:2501][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489373226796237:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.849798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489386111698943:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:41.849903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:41.850258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489386111698953:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, is ... TP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { 
type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-02-19T08:25:27.183711Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489585908149896:2340] TxId: 281474976710663. Ctx: { TraceId: 01khtg21242ppsk0rpb9ch7kx0, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGI0ZTNjZTMtYWE4MDJhOS03ZTQxMGNmNC0zNDEyODdk, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:27.190464Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg21242ppsk0rpb9ch7kx0, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGI0ZTNjZTMtYWE4MDJhOS03ZTQxMGNmNC0zNDEyODdk, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7608489585908149901:2354] 2026-02-19T08:25:27.190539Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg21242ppsk0rpb9ch7kx0, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGI0ZTNjZTMtYWE4MDJhOS03ZTQxMGNmNC0zNDEyODdk, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489585908149902:2355] 2026-02-19T08:25:27.191171Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976710663, SelfId: [9:7608489585908149899:2353]. Received TEvPoison from: [9:7608489585908149896:2340] 2026-02-19T08:25:27.191483Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg21242ppsk0rpb9ch7kx0", Forwarded response to sender actor, requestId: 4, sender: [9:7608489581613182499:2339], selfId: [9:7608489564433312594:2270], source: [9:7608489581613182500:2340] 2026-02-19T08:25:27.192759Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=OGI0ZTNjZTMtYWE4MDJhOS03ZTQxMGNmNC0zNDEyODdk, workerId: [9:7608489581613182500:2340], local sessions count: 0 >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] >> TSchemeShardSubDomainTest::RmDir [GOOD] >> GenericFederatedQuery::YdbFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:28.222108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:28.222183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:28.222254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:28.222290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:28.222319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:28.222348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:28.222405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:28.222447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:28.222515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:28.222589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:28.223362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:28.223615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:28.302819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:28.302869Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:28.312657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:28.313266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:28.313416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:28.322090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:28.322780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:28.323371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:28.323609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:28.326126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:28.326294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:28.327303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:28.327370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:28.327467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:28.327509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:28.327545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:28.327681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:28.437230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.438959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.439028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-02-19T08:25:29.084331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:29.084421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:29.084460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-02-19T08:25:29.084783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 Propose -> Done 2026-02-19T08:25:29.084840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-02-19T08:25:29.084989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:29.085058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-02-19T08:25:29.085102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:25:29.086711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:29.086750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:29.086895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:29.086974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.087012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:25:29.087051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:25:29.087346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.087384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone 
opId# 101:0 ProgressState 2026-02-19T08:25:29.087461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:29.087490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:29.087528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:25:29.087552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:29.087602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:25:29.087635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:25:29.087672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:25:29.087698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:25:29.087930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-02-19T08:25:29.087972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2026-02-19T08:25:29.088014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:25:29.088047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:25:29.088600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:29.088689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:29.088735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:29.088778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:25:29.088811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:29.089977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:29.090051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:25:29.090080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:25:29.090104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:25:29.090153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-02-19T08:25:29.090225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-02-19T08:25:29.090263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:971:2886] 2026-02-19T08:25:29.092377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:29.093301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:25:29.093386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:29.093412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:972:2887] TestWaitNotification: OK eventTxId 101 2026-02-19T08:25:29.093857Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:29.094039Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 203us result status StatusSuccess 2026-02-19T08:25:29.094571Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 
101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:40.195946Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489385819150323:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:40.198535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:40.376562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:40.400947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:40.401076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:40.463270Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:40.465379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:40.467156Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489385819150287:2081] 1771489480193152 != 1771489480193155 2026-02-19T08:24:40.536423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:40.550946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:40.550965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:24:40.550983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:40.551048Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:40.925284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:40.925604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:40.927277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:40.927989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:40.930268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489480978, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:40.931234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-02-19T08:24:40.931420Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489385819150820:2248] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-02-19T08:24:40.931748Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489385819150255:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.931862Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489385819150258:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.931906Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489385819150261:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.931985Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489385819151034:2293][/Root] Path was updated to new version: owner# [1:7608489385819151022:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.932135Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489385819150734:2204][/Root] Path was updated to new version: owner# [1:7608489385819150582:2121], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: 
there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.932280Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489385819151032:2292][/Root] Path was updated to new version: owner# [1:7608489385819151021:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.932509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2026-02-19T08:24:40.932565Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489385819150820:2248] Ack update: ack to# [1:7608489385819150643:2148], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:40.932955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2026-02-19T08:24:41.199564Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489390114118416:2487][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489385819150582:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.205254Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:43.116693Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:43.118548Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fce/r3tmp/spilling-tmp-runner/node_1_f2db1014-9e27ff3f-7ef08085-2fd36853, actor: [1:7608489398704053020:2309] 2026-02-19T08:24:43.118726Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fce/r3tmp/spilling-tmp-runner 2026-02-19T08:24:43.123312Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0m7d8109jpycdw796yqg", Request has 18444972584226.428321s seconds to be completed 2026-02-19T08:24:43.126729Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398704053039:2493][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489385819150582:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.130745Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0m7d8109jpycdw796yqg", Created new session, sessionId: ydb://session/3?node_id=1&id=OTc1NGI5OTUtMjhhNDNiNGQtN2QwZmZlMWMtMWI5ZmY2ZWQ=, workerId: [1:7608489398704053049:2326], database: /Root, longSession: 1, local sessions count: 1 2026-02-19T08:24:43.130986Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 
01khtg0m7d8109jpycdw796yqg 2026-02-19T08:24:43.131039Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:43.131065Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:24:43.131091Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:43.135197Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398704053051:2494][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489385819150582:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.138395Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398704053038:2492][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489385819150582:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.164928Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0pcwbhd24xvanar0czv0, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTc1NGI5OTUtMjhhNDNiNGQtN2QwZmZlMWMtMWI5ZmY2ZWQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489398704053049:2326] 2026-02-19T08:24:43.164982Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489398704053063:2497] 2026-02-19T08:24:43.166141Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398704053065:2498][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489385819150582:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.167401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489398704053064:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:43.167519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:43.168026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489398704053074:2329], DatabaseId: /Root, PoolId: default, Failed to fe ... { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-02-19T08:25:27.747941Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489587445770068:2336] TxId: 281474976710662. Ctx: { TraceId: 01khtg21j3fnjbvdxjk5evyapc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZTliZTk3Y2UtNWQ4Y2JlZDQtOWQ2MzUwMWYtNTIwMWU2NmE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:27.754244Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg21j3fnjbvdxjk5evyapc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZTliZTk3Y2UtNWQ4Y2JlZDQtOWQ2MzUwMWYtNTIwMWU2NmE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489587445770073:2351] 2026-02-19T08:25:27.754343Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg21j3fnjbvdxjk5evyapc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZTliZTk3Y2UtNWQ4Y2JlZDQtOWQ2MzUwMWYtNTIwMWU2NmE=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7608489587445770074:2352] 2026-02-19T08:25:27.754549Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976710662, SelfId: [9:7608489587445770071:2350]. Received TEvPoison from: [9:7608489587445770068:2336] 2026-02-19T08:25:27.755186Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg21j3fnjbvdxjk5evyapc", Forwarded response to sender actor, requestId: 4, sender: [9:7608489587445769965:2334], selfId: [9:7608489570265900093:2270], source: [9:7608489587445769968:2336] 2026-02-19T08:25:27.755697Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=ZTliZTk3Y2UtNWQ4Y2JlZDQtOWQ2MzUwMWYtNTIwMWU2NmE=, workerId: [9:7608489587445769968:2336], local sessions count: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:16.514756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:16.514855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.514939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:16.514979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:16.515015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:16.515052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:16.515082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:16.515132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:16.515198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:16.515276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:16.516118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:16.516428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:16.609533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:16.609588Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:16.621087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:16.621720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:16.621889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:16.632727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:16.633461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:16.634265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:16.634597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:16.637590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.637815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:16.639070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:16.639161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:16.639286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:16.639330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:16.639372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:16.639538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:16.795398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.796342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.796471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.796549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.796686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.796791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.796881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.796962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.797049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:25:16.797128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.797193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.797254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.797331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:16.797437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
678944, LocalPathId: 40] was 3 2026-02-19T08:25:29.113245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.114742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:29.114793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:29.114935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:25:29.115039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.115080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 107, path id: 38 2026-02-19T08:25:29.115113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 107, path id: 40 2026-02-19T08:25:29.115502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.115576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:25:29.115660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.115716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:25:29.115769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 107:0 ProposedWaitParts -> Done 2026-02-19T08:25:29.116319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:25:29.116454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:25:29.116499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-02-19T08:25:29.116541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 14 2026-02-19T08:25:29.116582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:29.117288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:25:29.117344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-02-19T08:25:29.117365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-02-19T08:25:29.117387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-02-19T08:25:29.117408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-02-19T08:25:29.117457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2026-02-19T08:25:29.119340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.119386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:25:29.119661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:25:29.119769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-02-19T08:25:29.119801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:25:29.119831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-02-19T08:25:29.119860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:25:29.119911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2026-02-19T08:25:29.119941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-02-19T08:25:29.119970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-02-19T08:25:29.119995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 107:0 2026-02-19T08:25:29.120067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:25:29.120661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:29.120700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:29.121806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:25:29.122304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-02-19T08:25:29.122775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.122826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-02-19T08:25:29.123547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2026-02-19T08:25:29.124103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2026-02-19T08:25:29.124149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2026-02-19T08:25:29.124678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-02-19T08:25:29.124762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:25:29.124803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1394:3317] TestWaitNotification: OK eventTxId 107 2026-02-19T08:25:29.125439Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:29.125663Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 244us result status StatusSuccess 2026-02-19T08:25:29.126155Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:24:39.425469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:39.538355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:39.544689Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:24:39.545058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:24:39.545232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:24:39.963106Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:40.059264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:40.059395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:40.119933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:40.225131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:24:40.943024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:40.943666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:40.943709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:40.943744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:40.944356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:41.013919Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:41.604167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:24:44.932300Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:44.939461Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:24:44.943037Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:24:44.988711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:24:44.988838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:45.047190Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:24:45.050262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:45.268019Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:45.268133Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:45.269647Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.270446Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.271375Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.272139Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.272392Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.272502Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.272709Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.272800Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.272970Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:24:45.288288Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:45.514786Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:45.572732Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:24:45.572848Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:24:45.634723Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:24:45.635068Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:24:45.635296Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:24:45.635367Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:24:45.635436Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:24:45.635489Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:24:45.635544Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:24:45.635598Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:24:45.636206Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:24:45.643044Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:24:45.643163Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:24:45.679345Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2180:2627] 2026-02-19T08:24:45.680281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2180:2627], schemeshard id = 72075186224037897 2026-02-19T08:24:45.791974Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2265:2669] 2026-02-19T08:24:45.796299Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:24:45.816859Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:24:45.816932Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:24:45.816997Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:24:45.823041Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:24:45.827845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:24:45.837116Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:24:45.837280Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:24:45.851541Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:24:46.056177Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:24:46.086402Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:24:46.421133Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:24:46.546401Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:24:46.546513Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:24:47.426667Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:15.749038Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4012:3539], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:15.752528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:25:15.811488Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:3994:3533], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-02-19T08:25:16.003220Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4081:3581], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:16.056238Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:4080:3580] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:25:16.305884Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4102:3594]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:16.306193Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:16.306291Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:4104:3596] 2026-02-19T08:25:16.306359Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:4104:3596] 2026-02-19T08:25:16.306800Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4105:3597] 2026-02-19T08:25:16.306944Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4104:3596], server id = [2:4105:3597], tablet id = 72075186224037894, status = OK 2026-02-19T08:25:16.307050Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4105:3597], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:25:16.307131Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:25:16.307238Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:25:16.307337Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4102:3594], StatRequests.size() = 1 2026-02-19T08:25:16.307670Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:25:23.284970Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:3967:3519], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:25:23.285154Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:3967:3519], Start read next stream part 2026-02-19T08:25:23.286037Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:4182:3656], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:25:23.288141Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33150, txId: 281474976720660] shutting down 2026-02-19T08:25:23.288498Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:4182:3656], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:25:23.288584Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:4182:3656], Start read next stream part 2026-02-19T08:25:23.290852Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:3967:3519], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:25:23.290895Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:3967:3519], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTFiZDRmOGEtNmJmNDQyNGMtMzE0Nzg0YzAtODU1MjUzMjc=, TxId: 2026-02-19T08:25:23.353802Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4207:3677]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:23.353967Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:23.353996Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:4207:3677], StatRequests.size() = 1 2026-02-19T08:25:27.388720Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:4182:3656], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:25:27.388852Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:4182:3656], Start read next stream part 2026-02-19T08:25:27.390902Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33160, txId: 281474976720663] shutting down 2026-02-19T08:25:27.391118Z node 2 :SYSTEM_VIEWS WARN: tx_interval_summary.cpp:204: [72075186224037891] TEvIntervalQuerySummary, wrong stage: node id# 2 2026-02-19T08:25:27.391518Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4271:3726], ActorId: [2:4274:3728], Starting query actor #1 [2:4277:3730] 2026-02-19T08:25:27.391580Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4274:3728], ActorId: [2:4277:3730], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:25:27.394446Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:4182:3656], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:25:27.394499Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3957:3515], ActorId: [2:4182:3656], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTY2ZDg1M2QtNjY1OTIzMWUtMTJlNjUzNjctNjIxNTY5MDY=, TxId: 2026-02-19T08:25:27.395268Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4274:3728], ActorId: [2:4277:3730], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTM5N2U5MzQtNzE1MDJhNjYtY2NmZmIyZDAtNzJlOGIxZDE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:25:27.457126Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4290:3742]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:27.457434Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:25:27.457486Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:4290:3742], StatRequests.size() = 1 2026-02-19T08:25:27.601250Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4274:3728], ActorId: [2:4277:3730], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTM5N2U5MzQtNzE1MDJhNjYtY2NmZmIyZDAtNzJlOGIxZDE=, TxId: 2026-02-19T08:25:27.601336Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4274:3728], ActorId: [2:4277:3730], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTM5N2U5MzQtNzE1MDJhNjYtY2NmZmIyZDAtNzJlOGIxZDE=, TxId: 2026-02-19T08:25:27.601697Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4271:3726], ActorId: [2:4274:3728], Got response [2:4277:3730] SUCCESS 2026-02-19T08:25:27.602020Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:25:27.616474Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:25:27.616563Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3069:3434] 2026-02-19T08:25:27.617319Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4312:3756]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:27.617763Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:25:27.617830Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:27.618172Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:25:27.618224Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ] 2026-02-19T08:25:27.618289Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:27.618604Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:27.623655Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-02-19T08:25:27.624994Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 4 2026-02-19T08:25:27.625157Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:28.398502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:28.398575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:28.398650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:28.398684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:28.398720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:28.398753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:28.398779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-02-19T08:25:28.398822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:28.398900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:28.398973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:28.399737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:28.400030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:28.483984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:28.484032Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:28.496497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:28.497210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:28.497378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:28.507058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:28.507767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:28.508449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:28.508719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:28.511670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:28.511859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:28.512787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:28.512843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:28.512938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:28.512979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:28.513016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-02-19T08:25:28.513142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:28.628577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.629455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.629573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.629642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.629768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.629851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.629904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.629975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.630064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.630148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.630223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.630277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.630344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.630433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
26-02-19T08:25:29.288876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-02-19T08:25:29.288922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2026-02-19T08:25:29.290796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:29.290841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:29.291031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:29.291131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.291172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-02-19T08:25:29.291217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-02-19T08:25:29.291273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.291339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 100:0 ProgressState 2026-02-19T08:25:29.291452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:29.291483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:29.291519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:29.291552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:29.291586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-02-19T08:25:29.291621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:29.291659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-02-19T08:25:29.291688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 100:0 2026-02-19T08:25:29.291914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-02-19T08:25:29.291972Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2026-02-19T08:25:29.292018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:25:29.292056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:25:29.293052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:29.293149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:29.293200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:29.293275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:25:29.293319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:29.294866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:29.294945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:29.294972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:29.294999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:25:29.295075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-02-19T08:25:29.295149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2026-02-19T08:25:29.295191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:958:2873] 2026-02-19T08:25:29.297875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-02-19T08:25:29.298766Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-02-19T08:25:29.298847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:29.298876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:959:2874] TestWaitNotification: OK eventTxId 100 2026-02-19T08:25:29.299333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:29.299544Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 220us result status StatusSuccess 2026-02-19T08:25:29.300057Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:25:29.302231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:25:29.302404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/USER_0, 
pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.302542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:25:29.304441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:29.304679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:23:52.931759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:53.044689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:53.066127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:53.066226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:53.066816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:53.332187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:53.332358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:53.412246Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489429943962 != 1771489429943966 2026-02-19T08:23:53.425736Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:53.479047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:53.570282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:53.887887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:837:2731], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:53.887957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:53.887996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:23:53.888074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [1:835:2729], Recipient [1:404:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-02-19T08:23:53.888114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:23:54.013815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-02-19T08:23:54.014084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:54.014306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:23:54.014350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:23:54.014542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:23:54.014643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:54.014747Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:54.015554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:23:54.015737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:23:54.015784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:54.015820Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:54.016012Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:54.016059Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:54.016139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:54.016195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:23:54.016240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:23:54.016284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:23:54.016423Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:54.016917Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:54.016963Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:54.017080Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:54.017115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:54.017185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:54.017235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:23:54.017277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:23:54.017347Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects 
ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:54.017762Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:54.017812Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:54.017910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:54.017941Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:54.017993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:54.018037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:54.018086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:23:54.018122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:54.018161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:23:54.031062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:23:54.031847Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:54.031930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:23:54.032108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:23:54.032417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269091328, Sender [1:400:2399], Recipient [1:404:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-02-19T08:23:54.032883Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:843:2737], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:54.032937Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:54.032978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:23:54.033122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [1:835:2729], Recipient [1:404:2403]: NKikimrScheme.TEvNotifyTxCompletion 
TxId: 1 2026-02-19T08:23:54.033160Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:23:54.033233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight ... , PendingWrites: 0 2026-02-19T08:25:20.971724Z node 9 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:25:20.971762Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:20.971792Z node 9 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:25:20.994098Z node 9 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:25:20.994172Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:20.994205Z node 9 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:25:20.994244Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:20.994276Z node 9 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:25:21.025257Z node 9 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:25:21.025327Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:21.025358Z node 9 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:25:21.025395Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:21.025426Z node 9 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:25:21.046148Z node 9 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:25:21.046226Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:21.046260Z node 9 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:25:21.046296Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:21.046328Z node 9 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:25:21.067138Z node 9 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:25:21.067222Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:21.067257Z node 9 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx 
pending commits 2026-02-19T08:25:21.067307Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:21.067342Z node 9 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:25:21.088065Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [9:404:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:25:21.088153Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:25:21.088239Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [9:404:2403], Recipient [9:404:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:25:21.088272Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:25:21.123076Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:25:21.123208Z node 9 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:25:21.123246Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:21.123277Z node 9 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:25:21.123315Z node 9 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:25:21.123349Z node 9 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:25:21.135204Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [9:1716:3350], Recipient [9:404:2403]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection" Options { ReturnChildren: true } 2026-02-19T08:25:21.135319Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:25:21.137537Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [9:1718:3352], Recipient [9:404:2403]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2026-02-19T08:25:21.137640Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found attribute: __incremental_backup = {} ... 
waiting for SysViewsRoster update finished 2026-02-19T08:25:26.087974Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:26.095963Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:26.098798Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:320:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:26.099148Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:26.099218Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:26.374535Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:26.374728Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:26.408213Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1771489521946163 != 1771489521946167 2026-02-19T08:25:26.412264Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:26.457401Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:26.539037Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:26.859891Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:26.874159Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:26.981021Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:27.241601Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:27.241760Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:27.242378Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:27.243170Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:27.243365Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:27.248713Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:25:27.386673Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:25:27.422524Z node 10 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [10:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardSubDomainTest::Restart [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:29.204856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:29.204931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:29.204999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:29.205038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:29.205070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:29.205108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:29.205133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:29.205196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:29.205256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:29.205320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:29.206079Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:29.206368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:29.288483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:29.288538Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:29.299359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:29.300016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:29.300145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:29.310163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:29.311055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:29.311791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:29.312139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:29.315856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.316055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:29.317181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:29.317264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.317374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:29.317422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:29.317461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:29.317645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.441861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.442870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.442987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.443820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
.146257Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 Forgetting tablet 72075186233409548 2026-02-19T08:25:30.147270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-02-19T08:25:30.147460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:25:30.149738Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-02-19T08:25:30.150384Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2026-02-19T08:25:30.151118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-02-19T08:25:30.151322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409547 2026-02-19T08:25:30.152458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:25:30.152629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 Forgetting tablet 72075186233409549 2026-02-19T08:25:30.155383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:30.155436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:30.155557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:30.155767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:5 2026-02-19T08:25:30.155814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-02-19T08:25:30.156193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:25:30.156250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:25:30.156311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:30.157184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:1 2026-02-19T08:25:30.157228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-02-19T08:25:30.157309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:30.157330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-02-19T08:25:30.160201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:6 2026-02-19T08:25:30.160246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-02-19T08:25:30.160346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:2 2026-02-19T08:25:30.160371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-02-19T08:25:30.160511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:30.160565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-02-19T08:25:30.162552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:25:30.162646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2026-02-19T08:25:30.162842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:25:30.162876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-02-19T08:25:30.162923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:25:30.162935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:25:30.163266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:25:30.163353Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:25:30.163377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1077:2979] 2026-02-19T08:25:30.163508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:25:30.163575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:25:30.163589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1077:2979] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-02-19T08:25:30.163913Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.164051Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 144us result status StatusPathDoesNotExist 2026-02-19T08:25:30.164159Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:25:30.164423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.164566Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 121us result status StatusSuccess 2026-02-19T08:25:30.164878Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:29.200264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:29.200352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:29.200418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:29.200454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:29.200497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:29.200555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:29.200584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:29.200623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:29.200693Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:29.200763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:29.201568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:29.201894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:29.290921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:29.290971Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:29.302668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:29.303262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:29.303440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:29.313859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:29.314557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:29.315237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:29.315527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:29.318892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.319125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:29.320451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:29.320517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.320632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:29.320689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:29.320733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:29.320912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.473936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.475943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.476039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
, at schemeshard: 72057594046678944 2026-02-19T08:25:30.269869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:754:2727], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-02-19T08:25:30.269914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:754:2727], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-02-19T08:25:30.270307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.270357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 100:0 ProgressState 2026-02-19T08:25:30.270469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:30.270505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:30.270546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:30.270577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:30.270621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-02-19T08:25:30.270676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:30.270717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-02-19T08:25:30.270748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 100:0 2026-02-19T08:25:30.270975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:30.271028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-02-19T08:25:30.271076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:25:30.271114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:25:30.271702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:30.271797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:30.271838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:30.271874Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:25:30.271965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:30.272643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:30.272715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:30.272741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:30.272786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:25:30.272823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:25:30.272892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-02-19T08:25:30.276023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-02-19T08:25:30.276138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-02-19T08:25:30.276410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:25:30.276455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-02-19T08:25:30.276909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:25:30.277000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:30.277035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:896:2842] TestWaitNotification: OK eventTxId 100 2026-02-19T08:25:30.277602Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.277833Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 
72057594046678944 describe path "/MyRoot/USER_0" took 242us result status StatusSuccess 2026-02-19T08:25:30.278277Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:30.278884Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.279098Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 205us result status StatusSuccess 2026-02-19T08:25:30.279521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain 
CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:29.274457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:29.274531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:29.274591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:29.274625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:29.274658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:29.274695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:29.274720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:29.274768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:25:29.274841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:29.274926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:29.275705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:29.276076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:29.356558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:29.356608Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:29.366889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:29.367550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:29.367713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:29.378058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:29.378773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:29.379400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:29.379649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:29.382398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.382568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:29.383589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:29.383653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.383781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:29.383827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:29.383863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:29.384068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.516350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.517944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.518005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.518069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.518155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.518236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
026-02-19T08:25:30.143760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:25:30.143780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-02-19T08:25:30.143793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:25:30.143805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:25:30.143818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:25:30.143918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.144007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.144153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:30.144194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:30.144250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:30.144285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:30.144395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.144475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.144663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2026-02-19T08:25:30.144987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.145095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.145516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.145612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.145904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.146108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.146165Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.146296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.146475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.146555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.146990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.147245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.147308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.147362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.147471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.147513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.147546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.151988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:30.154979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:30.155027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:30.155806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:30.155863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:30.155910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:30.157416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:863:2814] sender: [1:926:2058] recipient: [1:15:2062] 2026-02-19T08:25:30.224671Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.224908Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 
254us result status StatusSuccess 2026-02-19T08:25:30.225367Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:30.225913Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.226101Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 195us result status StatusSuccess 2026-02-19T08:25:30.226529Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:29.553830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:29.553913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:29.553978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:29.554031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:29.554066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:29.554105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:29.554133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:29.554187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:29.554254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:29.554342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:29.555180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:29.555495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:29.646967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:29.647029Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:29.659267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:29.660157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:29.660330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:29.672202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:29.673004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:29.673771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:29.674048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:29.677241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.677430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:29.678682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:29.678974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:29.679088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:29.679133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:29.679174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:29.679338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:29.832139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.833994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.834058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:29.834144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
HARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-02-19T08:25:30.386888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 100:0 Propose -> Done 2026-02-19T08:25:30.386925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-02-19T08:25:30.387046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:25:30.387088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:30.387130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2026-02-19T08:25:30.388525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:30.388551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:30.388675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:30.388770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:30.388806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-02-19T08:25:30.388844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-02-19T08:25:30.389026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.389055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 100:0 ProgressState 2026-02-19T08:25:30.389135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:30.389160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:30.389198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-02-19T08:25:30.389222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:30.389248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 100, ready 
parts: 1/1, is published: false 2026-02-19T08:25:30.389274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-02-19T08:25:30.389298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-02-19T08:25:30.389319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 100:0 2026-02-19T08:25:30.389362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:25:30.389399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-02-19T08:25:30.389443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:25:30.389476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:25:30.389987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:30.390073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:30.390102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:30.390137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:25:30.390191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:30.390990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:30.391077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-02-19T08:25:30.391114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-02-19T08:25:30.391140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:25:30.391167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 38] was 2 2026-02-19T08:25:30.391229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-02-19T08:25:30.393862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-02-19T08:25:30.394619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-02-19T08:25:30.394798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-02-19T08:25:30.394825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-02-19T08:25:30.394895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:25:30.394921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:25:30.395241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-02-19T08:25:30.395327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-02-19T08:25:30.395354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:717:2705] 2026-02-19T08:25:30.395443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:25:30.395526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:25:30.395558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:717:2705] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-02-19T08:25:30.395986Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.396189Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 239us result status StatusSuccess 2026-02-19T08:25:30.396550Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:40.644289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489384489297482:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:40.644922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:40.829571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:40.829680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:40.861925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:40.887338Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489384489297450:2081] 1771489480642553 != 1771489480642556 2026-02-19T08:24:40.897751Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:40.905018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:40.986612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:40.986639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:40.986662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:40.986750Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:41.087733Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:41.357757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:41.358185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:41.360368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:41.360971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:41.363139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489481412, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:41.364011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:41.364048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-02-19T08:24:41.364179Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489384489297982:2247] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:41.364507Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489384489297418:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:41.364630Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489384489297421:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:41.364671Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489384489297424:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:41.364802Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489384489298095:2292][/Root] Path was updated to new version: owner# [1:7608489384489298078:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.364840Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489384489297898:2204][/Root] Path was updated to new version: owner# [1:7608489384489297744:2120], state# { 
Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.365070Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489384489298096:2293][/Root] Path was updated to new version: owner# [1:7608489384489298080:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.365187Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489384489297982:2247] Ack update: ack to# [1:7608489384489297804:2146], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:41.365349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-02-19T08:24:41.650245Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489388784265580:2488][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489384489297744:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.651925Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:43.255855Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:43.257768Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fc4/r3tmp/spilling-tmp-runner/node_1_866316f3-ae91f99e-95a1f8c9-cd464588, actor: [1:7608489397374200184:2309] 2026-02-19T08:24:43.257964Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fc4/r3tmp/spilling-tmp-runner 2026-02-19T08:24:43.258386Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0mn7afqva6vv2jajy727", Request has 18444972584226.293247s seconds to be completed 2026-02-19T08:24:43.261878Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0mn7afqva6vv2jajy727", Created new session, sessionId: ydb://session/3?node_id=1&id=MWQ4NDcwM2ItNWRkMjVjNzctZDE3MzBjYy0xZWZjMmNmZA==, workerId: [1:7608489397374200201:2325], database: /Root, longSession: 1, local sessions count: 1 2026-02-19T08:24:43.262127Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtg0mn7afqva6vv2jajy727 2026-02-19T08:24:43.262179Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:43.262228Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 
2026-02-19T08:24:43.262256Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:43.263683Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489397374200203:2493][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489384489297744:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.263834Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489397374200204:2494][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489384489297744:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.263989Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489397374200205:2495][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489384489297744:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.299356Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0ph26f89r9kh8ry9jy44, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ4NDcwM2ItNWRkMjVjNzctZDE3MzBjYy0xZWZjMmNmZA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489397374200201:2325] 2026-02-19T08:24:43.299409Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489397374200227:2498] 2026-02-19T08:24:43.302161Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489397374200230:2499][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489384489297744:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.303235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489397374200228:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:43.303359Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:43.303713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489397374200238:2329], DatabaseId: /Root, PoolId: default, Failed to fe ... rt: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-02-19T08:25:28.881204Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489589591650520:2337] TxId: 281474976710662. 
Ctx: { TraceId: 01khtg22p521f4zet8aej1chh1, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Njc2MzNmNmQtNzYzMTg0NjItNmQyYzY5OGItYjQyMTJjMDE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:28.886725Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg22p521f4zet8aej1chh1, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Njc2MzNmNmQtNzYzMTg0NjItNmQyYzY5OGItYjQyMTJjMDE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489589591650525:2351] 2026-02-19T08:25:28.886814Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg22p521f4zet8aej1chh1, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Njc2MzNmNmQtNzYzMTg0NjItNmQyYzY5OGItYjQyMTJjMDE=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7608489589591650526:2352] 2026-02-19T08:25:28.886983Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976710662, SelfId: [9:7608489589591650523:2350]. Received TEvPoison from: [9:7608489589591650520:2337] 2026-02-19T08:25:28.887548Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg22p521f4zet8aej1chh1", Forwarded response to sender actor, requestId: 4, sender: [9:7608489589591650421:2336], selfId: [9:7608489568116813252:2270], source: [9:7608489589591650422:2337] 2026-02-19T08:25:28.887969Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=Njc2MzNmNmQtNzYzMTg0NjItNmQyYzY5OGItYjQyMTJjMDE=, workerId: [9:7608489589591650422:2337], local sessions count: 0 2026-02-19T08:25:28.925416Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7608489568116813022:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:25:28.925539Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:18.905432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:18.905519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:18.905598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:18.905653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:18.905689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:18.905746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:18.905780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:18.905825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:18.905902Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:18.905986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:18.906845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:18.907179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:18.994896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:18.994974Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:19.007051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:19.007735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:19.007929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:19.019264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:19.020053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:19.020697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:19.020918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:19.023964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:19.024153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:19.025302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:19.025371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:19.025499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:19.025542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:19.025586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:19.025754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:19.159862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.160586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.160703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.160770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.160874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.160927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.160992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.161072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.161154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.161239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.161301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.161354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.161419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:19.161499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.473554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:30.473606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:30.473801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:25:30.473994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:30.474039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:25:30.474085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:25:30.474675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.474738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:25:30.474838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.474882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-02-19T08:25:30.474930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 ProposedWaitParts -> Done 2026-02-19T08:25:30.476090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:30.476179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:30.476218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:30.476256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2026-02-19T08:25:30.476294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 
2026-02-19T08:25:30.477686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:30.477799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:30.477829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:30.477850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:25:30.477872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:25:30.477927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:25:30.480041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.480096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:25:30.480429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:25:30.480589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:30.480625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:30.480662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:30.480690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:30.480722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:25:30.480786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:811:2776] message: TxId: 103 2026-02-19T08:25:30.480829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:30.480865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:25:30.480894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:25:30.480983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 39] was 2 2026-02-19T08:25:30.481700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:30.481756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:25:30.482066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:30.483463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:30.484196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:30.484237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-02-19T08:25:30.484431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:25:30.484462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1741:3664] 2026-02-19T08:25:30.484892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-02-19T08:25:30.488202Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.488392Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 214us result status StatusSuccess 2026-02-19T08:25:30.488815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DCB7A304100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:23.608333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:23.608428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:23.608518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:23.608565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:23.608595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:23.608627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:23.608661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:23.608684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:25:23.608758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:23.608841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:23.609651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:23.610005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:23.672303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:23.672351Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:23.681016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:23.681610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:23.681772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:23.690360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:23.691039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:23.691518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:23.691786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:23.694275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.694434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:23.695324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:23.695379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.695484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:23.695527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:23.695558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:23.695692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.805691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.808744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.808866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.808916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.809648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: E ... 
efCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:25:30.530857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-02-19T08:25:30.530879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:25:30.530904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:25:30.530927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:25:30.531078Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.531169Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.531272Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:30.531320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:25:30.531427Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.531517Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.531945Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 66, at schemeshard: 72057594046678944 2026-02-19T08:25:30.532485Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.532601Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.532983Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.533062Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.533350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.533508Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.533566Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.533654Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.533880Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 
2026-02-19T08:25:30.533991Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.534478Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.534793Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.534880Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.534950Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.535121Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.535183Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.535254Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:25:30.535537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:25:30.535817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-02-19T08:25:30.541200Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:30.546015Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:30.546099Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:30.546928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:30.546997Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:30.547049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:30.550040Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1016:2975] sender: [2:1072:2058] recipient: [2:15:2062] 2026-02-19T08:25:30.579341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DCB7A301100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DCB7A301100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DCB7A301100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DCB7A301100, 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DCB7A301100, stats written 0 2026-02-19T08:25:30.621223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DCB7A301100, stats written 0 2026-02-19T08:25:30.621447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-02-19T08:25:30.621523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-02-19T08:25:30.621605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:466: OLAP store contains 1 tables. 2026-02-19T08:25:30.621662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:488: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DCB7A301100, stats written 1 2026-02-19T08:25:30.622271Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:30.622511Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 266us result status StatusSuccess 2026-02-19T08:25:30.623253Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 165 LastUpdateTime: 165 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 
ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:39.110069Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489380488273228:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:39.110107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:39.210243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:24:39.428236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:39.428323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:39.437032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:39.464587Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:39.520657Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489380488273174:2081] 1771489479095971 != 1771489479095974 2026-02-19T08:24:39.525857Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:39.610418Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:39.610448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:39.610461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:39.610544Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:39.751684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:39.980992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:39.981427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:39.983224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:39.983888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:39.985765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489480033, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:39.986769Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489380488273709:2249] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:39.986779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:39.986822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-02-19T08:24:39.986974Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489380488273142:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.987123Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489380488273145:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.987127Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489380488273148:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:39.987295Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489380488273928:2293][/Root] Path was updated to new version: owner# [1:7608489380488273870:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: 
[OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.987334Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489380488273670:2221][/Root] Path was updated to new version: owner# [1:7608489380488273500:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:39.987344Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489380488273709:2249] Ack update: ack to# [1:7608489380488273526:2144], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:39.987490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-02-19T08:24:39.987579Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489380488273897:2292][/Root] Path was updated to new version: owner# [1:7608489380488273869:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.086499Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489384783241303:2486][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489380488273500:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.162036Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:42.044602Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:42.046421Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fd7/r3tmp/spilling-tmp-runner/node_1_1ae677f7-aa6397c8-7146c013-e822eda2, actor: [1:7608489393373175907:2309] 2026-02-19T08:24:42.046662Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fd7/r3tmp/spilling-tmp-runner 2026-02-19T08:24:42.047118Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0k9y59h083wyw1ypqmze", Request has 18444972584227.504538s seconds to be completed 2026-02-19T08:24:42.051166Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0k9y59h083wyw1ypqmze", Created new session, sessionId: ydb://session/3?node_id=1&id=ZGY5ZWRiMTMtYTUyYTQwNzgtMmEwMDI4MzctNWQ1NzBmMzY=, workerId: [1:7608489393373175924:2325], database: /Root, longSession: 
1, local sessions count: 1 2026-02-19T08:24:42.051407Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtg0k9y59h083wyw1ypqmze 2026-02-19T08:24:42.051468Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:42.051526Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:24:42.051563Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:42.052591Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489393373175927:2492][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489380488273500:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:42.052659Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489393373175926:2491][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489380488273500:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:42.052784Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489393373175928:2493][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489380488273500:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:42.080558Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0nazcg24q1w2m3mgm4wd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY5ZWRiMTMtYTUyYTQwNzgtMmEwMDI4MzctNWQ1NzBmMzY=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489393373175924:2325] 2026-02-19T08:24:42.080616Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489393373175950:2496] 2026-02-19T08:24:42.081665Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489393373175952:2497][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489380488273500:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:42.082787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489393373175951:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:42.082899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ... e { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 2026-02-19T08:25:29.511046Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489592934387697:2337] TxId: 281474976710662. Ctx: { TraceId: 01khtg23a7fqpzpyf690et7a2x, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ODA4NGY5MjUtNWE1NWNjMGEtMjFkNzc0NjItNmZjYmYxZDc=, PoolId: default, IsStreamingQuery: 0}. 
Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:29.517697Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg23a7fqpzpyf690et7a2x, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ODA4NGY5MjUtNWE1NWNjMGEtMjFkNzc0NjItNmZjYmYxZDc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489592934387703:2352] 2026-02-19T08:25:29.517792Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01khtg23a7fqpzpyf690et7a2x, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ODA4NGY5MjUtNWE1NWNjMGEtMjFkNzc0NjItNmZjYmYxZDc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489592934387702:2351] 2026-02-19T08:25:29.517966Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976710662, SelfId: [9:7608489592934387700:2350]. Received TEvPoison from: [9:7608489592934387697:2337] 2026-02-19T08:25:29.518883Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg23a7fqpzpyf690et7a2x", Forwarded response to sender actor, requestId: 4, sender: [9:7608489592934387598:2336], selfId: [9:7608489571459550429:2270], source: [9:7608489592934387599:2337] 2026-02-19T08:25:29.519107Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=ODA4NGY5MjUtNWE1NWNjMGEtMjFkNzc0NjItNmZjYmYxZDc=, workerId: [9:7608489592934387599:2337], local sessions count: 0 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> BasicUsage::KeyedWriteSession_IdleSessionsTimeout [GOOD] >> BasicUsage::KeyedWriteSession_BoundPartitionChooser_SplitPartition_MultiThreadedAcksOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:40.124310Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489385654989514:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:40.125473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:40.337054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:40.337148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:40.367075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:40.382291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:40.419562Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:40.421047Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489385654989484:2081] 
1771489480122682 != 1771489480122685 2026-02-19T08:24:40.504016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:40.504048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:40.504056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:40.504134Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:40.531182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:40.861537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:40.862022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:40.863870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:40.864582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:40.867530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489480915, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:40.869146Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489385654990014:2245] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:40.869235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:40.869290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-02-19T08:24:40.869360Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489385654989452:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.869523Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489385654989455:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.869560Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489385654989458:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:40.869782Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489385654990014:2245] Ack update: ack to# [1:7608489385654989838:2146], cookie# 281474976710657, pathId# [OwnerId: 
72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:40.869811Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489385654989954:2222][/Root] Path was updated to new version: owner# [1:7608489385654989777:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.869812Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489385654990131:2292][/Root] Path was updated to new version: owner# [1:7608489385654990125:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:40.869971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-02-19T08:24:40.870119Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489385654990132:2293][/Root] Path was updated to new version: owner# [1:7608489385654990126:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.129987Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489389949957612:2485][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489385654989777:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.130304Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:43.189603Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:43.190960Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fd3/r3tmp/spilling-tmp-runner/node_1_6cf9e8b2-67952b11-4c73e91b-fcd883c3, actor: [1:7608489398539892220:2312] 2026-02-19T08:24:43.198275Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398539892221:2491][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489385654989777:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.205905Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fd3/r3tmp/spilling-tmp-runner 2026-02-19T08:24:43.206319Z 
node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0m5rbeemwf7eyq7r121s", Request has 18444972584226.345326s seconds to be completed 2026-02-19T08:24:43.210766Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0m5rbeemwf7eyq7r121s", Created new session, sessionId: ydb://session/3?node_id=1&id=ZWRhY2MwMzQtYjNiZTQxMDUtNjQwMWQ3ZS04MGRlNGJkNg==, workerId: [1:7608489398539892243:2326], database: /Root, longSession: 1, local sessions count: 1 2026-02-19T08:24:43.211039Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtg0m5rbeemwf7eyq7r121s 2026-02-19T08:24:43.211131Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:43.211155Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:24:43.211199Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:43.214925Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398539892245:2493][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489385654989777:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.215053Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398539892246:2494][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489385654989777:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.246654Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0pfe1y5gxvddpan7v4bc, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhY2MwMzQtYjNiZTQxMDUtNjQwMWQ3ZS04MGRlNGJkNg==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489398539892243:2326] 2026-02-19T08:24:43.246688Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489398539892261:2496] 2026-02-19T08:24:43.251293Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398539892264:2497][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489385654989777:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.252520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489398539892262:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:43.252641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:43.253323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489398539892272:2330], DatabaseId: /Root, PoolId: default, Failed to fe ... type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-02-19T08:25:30.364861Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489598967282204:2337] TxId: 281474976710662. Ctx: { TraceId: 01khtg244becjwsv9795y2d7s6, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZTJhNzIwNzUtYjY4ZDc5ZTgtNzIyMWNjNjAtOTM1NTRmMWU=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:30.370787Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg244becjwsv9795y2d7s6, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZTJhNzIwNzUtYjY4ZDc5ZTgtNzIyMWNjNjAtOTM1NTRmMWU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489598967282209:2351] 2026-02-19T08:25:30.370898Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg244becjwsv9795y2d7s6, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZTJhNzIwNzUtYjY4ZDc5ZTgtNzIyMWNjNjAtOTM1NTRmMWU=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7608489598967282210:2352] 2026-02-19T08:25:30.371052Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976710662, SelfId: [9:7608489598967282207:2350]. Received TEvPoison from: [9:7608489598967282204:2337] 2026-02-19T08:25:30.371634Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg244becjwsv9795y2d7s6", Forwarded response to sender actor, requestId: 4, sender: [9:7608489594672314809:2336], selfId: [9:7608489577492444931:2270], source: [9:7608489594672314810:2337] 2026-02-19T08:25:30.372056Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=ZTJhNzIwNzUtYjY4ZDc5ZTgtNzIyMWNjNjAtOTM1NTRmMWU=, workerId: [9:7608489594672314810:2337], local sessions count: 0 >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007E06D8F04100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:28.140380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:28.140465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:28.140542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:28.140613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:28.140658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:28.140696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:28.140726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:28.140772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:28.140858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:28.140935Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:28.141778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:28.142081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:28.210412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:28.210459Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:28.219939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:28.220545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:28.220684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:28.228993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:28.229616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:28.230221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:28.230450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:28.232963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:28.233140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:28.234071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:28.234127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:28.234223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:28.234257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:28.234287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:28.234446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:28.352895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 
281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.353846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.353943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.353992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:28.354750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETo ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 Leader for TabletID 72057594046678944 is [1:895:2842] sender: [1:974:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 2026-02-19T08:25:32.230504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 2026-02-19T08:25:32.230682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 0 2026-02-19T08:25:32.230755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-02-19T08:25:32.230809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:32.231021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 1 2026-02-19T08:25:32.231329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-02-19T08:25:32.231366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:32.231447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007E06D8F10100, stats written 2 2026-02-19T08:25:32.232002Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:32.235855Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 239us result status StatusSuccess 2026-02-19T08:25:32.236300Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165424 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:42.842330Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489394602567444:2181];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:42.842560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:43.055089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:43.083067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:43.083185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:43.121961Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:43.124074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:43.124418Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489394602567282:2081] 1771489482822398 != 1771489482822401 2026-02-19T08:24:43.254234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:43.254261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:43.254271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:43.254351Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:43.263996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:43.690532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:43.690996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:43.693980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:43.694744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:43.697315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489483743, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:43.698573Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489398897535112:2247] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:43.698618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:43.698673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-02-19T08:24:43.698843Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489394602567250:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:43.698887Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489394602567253:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:43.699033Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489394602567256:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:43.699175Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489398897535035:2207][/Root] Path was updated to new version: owner# [1:7608489394602567570:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.699241Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489398897535112:2247] 
Ack update: ack to# [1:7608489394602567637:2146], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:43.699428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-02-19T08:24:43.699779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:24:43.699864Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489398897535190:2292][/Root] Path was updated to new version: owner# [1:7608489398897535182:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.700109Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489398897535191:2293][/Root] Path was updated to new version: owner# [1:7608489398897535183:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.843984Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:43.845079Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489398897535417:2490][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489394602567570:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:45.987157Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:45.989023Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fc1/r3tmp/spilling-tmp-runner/node_1_6010416c-7a420b4-12a46bad-2e374435, actor: [1:7608489407487470024:2310] 2026-02-19T08:24:45.989223Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fc1/r3tmp/spilling-tmp-runner 2026-02-19T08:24:45.989778Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0py6dmqhx10qw0xw7sfn", Request has 18444972584223.561859s seconds to be completed 2026-02-19T08:24:45.993823Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0py6dmqhx10qw0xw7sfn", Created new session, sessionId: ydb://session/3?node_id=1&id=NWEyMjhkODktN2Y2Y2E4NTQtNTNlYTA4ZTAtYTkyZDVmM2Q=, workerId: [1:7608489407487470041:2326], database: /Root, longSession: 1, local sessions count: 1 2026-02-19T08:24:45.994003Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtg0py6dmqhx10qw0xw7sfn 2026-02-19T08:24:45.994056Z node 1 :KQP_PROXY DEBUG: 
kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:45.994075Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:24:45.994135Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:45.997419Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489407487470043:2497][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489394602567570:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:45.997539Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489407487470044:2498][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489394602567570:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:45.997628Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489407487470045:2499][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489394602567570:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:46.017155Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0s602t1ngs6dgzrnmh8f, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NWEyMjhkODktN2Y2Y2E4NTQtNTNlYTA4ZTAtYTkyZDVmM2Q=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489407487470041:2326] 2026-02-19T08:24:46.017206Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489411782437363:2502] 2026-02-19T08:24:46.018581Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489411782437365:2503][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489394602567570:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:46.020070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489411782437364:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:46.020230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:46.020 ... tring" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-02-19T08:25:31.103767Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489603883357022:2337] TxId: 281474976710662. Ctx: { TraceId: 01khtg24tx3tcjpp3pc1fqgqwv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YmY4ZDIwNi1iOTJiZWJkMy1kOGQ1ODIwZC03NjAwOGMz, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:31.109905Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg24tx3tcjpp3pc1fqgqwv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YmY4ZDIwNi1iOTJiZWJkMy1kOGQ1ODIwZC03NjAwOGMz, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489603883357028:2351] 2026-02-19T08:25:31.109992Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg24tx3tcjpp3pc1fqgqwv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YmY4ZDIwNi1iOTJiZWJkMy1kOGQ1ODIwZC03NjAwOGMz, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7608489603883357029:2352] 2026-02-19T08:25:31.110160Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976710662, SelfId: [9:7608489603883357026:2350]. Received TEvPoison from: [9:7608489603883357022:2337] 2026-02-19T08:25:31.110822Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg24tx3tcjpp3pc1fqgqwv", Forwarded response to sender actor, requestId: 4, sender: [9:7608489599588389627:2336], selfId: [9:7608489582408519754:2270], source: [9:7608489599588389628:2337] 2026-02-19T08:25:31.111233Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=YmY4ZDIwNi1iOTJiZWJkMy1kOGQ1ODIwZC03NjAwOGMz, workerId: [9:7608489599588389628:2337], local sessions count: 0 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] Test command err: 2026-02-19T08:24:40.877829Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489384632975370:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:40.883147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:41.108595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:41.108737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:41.135493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:41.154960Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:41.156223Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:41.156492Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489384632975325:2081] 1771489480874574 != 1771489480874577 2026-02-19T08:24:41.261002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:41.261027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:41.261033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:41.261123Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:41.341390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:41.653621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-02-19T08:24:41.654135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:24:41.658456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:24:41.659627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:24:41.661855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489481706, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:24:41.662888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-02-19T08:24:41.662944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-02-19T08:24:41.663219Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7608489388927943150:2245] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-02-19T08:24:41.663691Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489384632975293:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:41.663829Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489384632975296:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-02-19T08:24:41.663910Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7608489384632975299:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, 
LocalPathId: 1], deletion# false 2026-02-19T08:24:41.664025Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489388927943268:2293][/Root] Path was updated to new version: owner# [1:7608489388927943261:2288], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.664193Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489388927943074:2207][/Root] Path was updated to new version: owner# [1:7608489384632975617:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.664453Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7608489388927943267:2292][/Root] Path was updated to new version: owner# [1:7608489388927943260:2287], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:41.664669Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7608489388927943150:2245] Ack update: ack to# [1:7608489384632975680:2147], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-02-19T08:24:41.664852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-02-19T08:24:41.883145Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:24:41.884971Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489388927943458:2491][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7608489384632975617:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.955298Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:43.956814Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/004fc2/r3tmp/spilling-tmp-runner/node_1_89f6f765-5191938f-457c4fb3-81057fb3, actor: [1:7608489397517878065:2310] 2026-02-19T08:24:43.956979Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/004fc2/r3tmp/spilling-tmp-runner 2026-02-19T08:24:43.962086Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtg0mykbx6xehbw6pf36yz2", Request has 
18444972584225.589562s seconds to be completed 2026-02-19T08:24:43.965843Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtg0mykbx6xehbw6pf36yz2", Created new session, sessionId: ydb://session/3?node_id=1&id=NzBmYzBkMjMtNzA5NWI5YWMtYzUzZjE4OTgtOGZlNjA0NWQ=, workerId: [1:7608489397517878081:2325], database: /Root, longSession: 1, local sessions count: 1 2026-02-19T08:24:43.966073Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtg0mykbx6xehbw6pf36yz2 2026-02-19T08:24:43.966126Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:24:43.966150Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:24:43.966177Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 5 2026-02-19T08:24:43.968258Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489397517878083:2499][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7608489384632975617:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.968440Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489397517878082:2498][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7608489384632975617:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:43.990381Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489397517878099:2501][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7608489384632975617:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:44.006329Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtg0q75dp52p9sb3fep130e, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NzBmYzBkMjMtNzA5NWI5YWMtYzUzZjE4OTgtOGZlNjA0NWQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7608489397517878081:2325] 2026-02-19T08:24:44.006363Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7608489401812845404:2503] 2026-02-19T08:24:44.007512Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7608489401812845406:2504][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7608489384632975617:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-02-19T08:24:44.009413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489401812845405:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:44.009507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:24:44.009904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608489401812845414:2330], DatabaseId: /Root, PoolId: default, Failed to fe ... type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-02-19T08:25:31.522420Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [9:7608489602201111653:2337] TxId: 281474976710662. Ctx: { TraceId: 01khtg2589bbzh8dnstgndcgaa, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YjRlN2E1NDEtNDhjNmNiZjMtNGY0ZjgxNmMtOTVjYjE4OGM=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-02-19T08:25:31.528117Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg2589bbzh8dnstgndcgaa, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YjRlN2E1NDEtNDhjNmNiZjMtNGY0ZjgxNmMtOTVjYjE4OGM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7608489602201111659:2352] 2026-02-19T08:25:31.528240Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01khtg2589bbzh8dnstgndcgaa, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YjRlN2E1NDEtNDhjNmNiZjMtNGY0ZjgxNmMtOTVjYjE4OGM=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7608489602201111658:2351] 2026-02-19T08:25:31.528795Z node 9 :KQP_COMPUTE INFO: dq_info_aggregation_actor.cpp:239: [TDqInfoAggregationActor] TxId: 281474976710662, SelfId: [9:7608489602201111656:2350]. Received TEvPoison from: [9:7608489602201111653:2337] 2026-02-19T08:25:31.528971Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtg2589bbzh8dnstgndcgaa", Forwarded response to sender actor, requestId: 4, sender: [9:7608489602201111554:2336], selfId: [9:7608489580726274384:2270], source: [9:7608489602201111555:2337] 2026-02-19T08:25:31.529406Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=9&id=YjRlN2E1NDEtNDhjNmNiZjMtNGY0ZjgxNmMtOTVjYjE4OGM=, workerId: [9:7608489602201111555:2337], local sessions count: 0 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:23.395297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:23.395390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:23.395448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:23.395481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:23.395514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:23.395548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:23.395575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:23.395616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:23.395686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:23.395773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:23.396544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:23.396835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:23.483564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:23.483628Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:23.495200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:23.495834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:23.496003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:23.506849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:23.507630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:23.508320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:23.508603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:23.511551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.511743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:23.512926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:23.512989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:23.513101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:23.513141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:23.513180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:23.513361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:23.656539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.657408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.657535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.657620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.657790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.657859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.657922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.657996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.658074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.658147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.658204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.658265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.658347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:23.658490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
46678944, LocalPathId: 39] 2026-02-19T08:25:32.377689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:32.377731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:25:32.377762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:25:32.377783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:25:32.378166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:25:32.378203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:25:32.378280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:32.378308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:32.378357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:25:32.378388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:32.378417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:25:32.378446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:25:32.378499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:25:32.378545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:25:32.378688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:25:32.378733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-02-19T08:25:32.378766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 9 2026-02-19T08:25:32.378800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:25:32.379873Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2026-02-19T08:25:32.379982Z node 1 :HIVE INFO: 
tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2026-02-19T08:25:32.380095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:32.380173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:32.380206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:32.380267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:25:32.380334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:25:32.380825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:32.381445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:32.381517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:25:32.381549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:25:32.381578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 9 2026-02-19T08:25:32.381630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:25:32.381726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:25:32.382228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-02-19T08:25:32.382908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 
72057594046678944 2026-02-19T08:25:32.382958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:25:32.383041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:25:32.387378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:32.387492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:3 2026-02-19T08:25:32.387560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:25:32.387627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046678944:4 2026-02-19T08:25:32.387820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:25:32.388146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:25:32.388188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:25:32.388662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:25:32.388761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:25:32.388801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1146:3048] TestWaitNotification: OK eventTxId 103 2026-02-19T08:25:32.765993Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:32.766235Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 275us result status StatusSuccess 2026-02-19T08:25:32.766676Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:31.087134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:31.087227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:31.087297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:31.087342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:31.087407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:31.087451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:31.087502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:31.087531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:31.087639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:31.087723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:31.088608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:31.088955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:31.183196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:31.183261Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:31.195770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:31.196431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:31.196610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:31.208290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:31.208972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:31.209757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:31.210062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:31.213240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:31.213452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:31.214647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:31.214716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:31.214847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:31.214906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:31.214954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:31.215142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:31.344505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.345543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.345776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.345873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.345974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.346445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
erationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:34.052766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:34.052828Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:199: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000040 2026-02-19T08:25:34.053019Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 108:0 Propose -> ProposedWaitParts 2026-02-19T08:25:34.053157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:25:34.053201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2026-02-19T08:25:34.055261Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:34.055307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:34.055475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:25:34.055630Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:34.055665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:749:2723], at schemeshard: 72057594046678944, txId: 108, path id: 1 2026-02-19T08:25:34.055698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:749:2723], at schemeshard: 72057594046678944, txId: 108, path id: 41 2026-02-19T08:25:34.056054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:25:34.056114Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:25:34.056175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2026-02-19T08:25:34.056778Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:34.056861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:34.056909Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:25:34.056947Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2026-02-19T08:25:34.056991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:25:34.057930Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:34.058002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2026-02-19T08:25:34.058023Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-02-19T08:25:34.058070Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 5 2026-02-19T08:25:34.058104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-02-19T08:25:34.058165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2026-02-19T08:25:34.060336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2026-02-19T08:25:34.061154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:25:34.062007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-02-19T08:25:34.074041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6670: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2026-02-19T08:25:34.074103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2026-02-19T08:25:34.074232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2026-02-19T08:25:34.074306Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 108:0 ProposedWaitParts -> Done FAKE_COORDINATOR: Erasing txId 108 2026-02-19T08:25:34.075856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:25:34.075985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-02-19T08:25:34.076026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 108:0 ProgressState 2026-02-19T08:25:34.076123Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:25:34.076162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:25:34.076201Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-02-19T08:25:34.076236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:25:34.076266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-02-19T08:25:34.076316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:904:2851] message: TxId: 108 2026-02-19T08:25:34.076352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-02-19T08:25:34.076380Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-02-19T08:25:34.076414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 108:0 2026-02-19T08:25:34.076531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-02-19T08:25:34.078285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:25:34.078352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:1354:3263] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2026-02-19T08:25:34.080833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:25:34.081035Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2026-02-19T08:25:34.081340Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2026-02-19T08:25:34.083244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:25:34.083431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2026-02-19T08:25:34.083845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2026-02-19T08:25:34.083885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2026-02-19T08:25:34.084301Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-02-19T08:25:34.084385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-02-19T08:25:34.084413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:1389:3298] TestWaitNotification: OK eventTxId 109 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TxUsage::WriteToTopic_Demo_45_Query 
[GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007D65F3C04100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:25.637085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:25.637159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:25.637209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:25.637236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:25.637260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:25.637286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:25.637321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:25.637343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:25.637402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:25.637468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:25.638306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:25.638644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:25.718224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:25.718277Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:25.731801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:25.733982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:25.734199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:25.746216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:25.747031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:25.747854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:25.748159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:25.751345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:25.751545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:25.752670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:25.752742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:25.752855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:25.752902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:25.752947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:25.753111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:25.893894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.894899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: 
EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.895935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:25.896004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: E ... 
65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 Leader for TabletID 72057594046678944 is [2:1020:2979] sender: [2:1078:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 2026-02-19T08:25:36.208706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007D65F3C01100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 2026-02-19T08:25:36.261190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 0 2026-02-19T08:25:36.261418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-02-19T08:25:36.261487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-02-19T08:25:36.261571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:466: OLAP store contains 1 tables. 2026-02-19T08:25:36.261635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:488: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D65F3C01100, stats written 1 2026-02-19T08:25:36.262244Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:36.262508Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 317us result status StatusSuccess 2026-02-19T08:25:36.263146Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 167 LastUpdateTime: 167 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 
PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:27.569188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:27.569292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:27.569353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:27.569387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:27.569424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: 
using default configuration 2026-02-19T08:25:27.569460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:27.569507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:27.569539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:27.569618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:27.569692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:27.570521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:27.570826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:27.659368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:27.659434Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:27.671667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:27.672353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:27.672535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:27.683838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:27.684592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:27.685285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:27.685583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:27.689165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:27.689363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:27.690612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:27.690677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:27.690792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:27.690834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: 
unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:27.690874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:27.691066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:27.845469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.846438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.846561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.846629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.846746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.846816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.846874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.846957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.847044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.847131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.847215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.847294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.847397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:27.847462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 Leader for TabletID 72057594046678944 is [2:1098:3017] sender: [2:1174:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 2026-02-19T08:25:36.187472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0.093 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 2026-02-19T08:25:36.218688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 0 2026-02-19T08:25:36.218915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-02-19T08:25:36.218992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table1, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:36.219112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 
72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 2026-02-19T08:25:36.229731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 2026-02-19T08:25:36.250832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 2026-02-19T08:25:36.261586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 
rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 1 2026-02-19T08:25:36.261754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2026-02-19T08:25:36.261792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:36.261893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-02-19T08:25:36.261930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2026-02-19T08:25:36.261961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-02-19T08:25:36.261991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D717CCE9100, stats written 3 2026-02-19T08:25:36.262433Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:36.262601Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 204us result status StatusSuccess 2026-02-19T08:25:36.262995Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TxUsage::WriteToTopic_Demo_46_Table |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:24.143820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:24.143895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:24.143956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:24.144013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:24.144057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 
2026-02-19T08:25:24.144091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:24.144122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:24.144154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:24.144211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:24.144294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:24.145057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:24.145323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:24.232727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:24.232782Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:24.244095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:24.244703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:24.244873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:24.255560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:24.256176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:24.256936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:24.257178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:24.259966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.260138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:24.261226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:24.261286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:24.261390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:24.261437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:24.261476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:24.261643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:24.400464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.401425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.401544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.401631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.401786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.401855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.401934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.402005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.402081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.402158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.402230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.402305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.402394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:24.402470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
91546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-02-19T08:25:36.891589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-02-19T08:25:36.891777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-02-19T08:25:36.891941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-02-19T08:25:36.891979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-02-19T08:25:36.892030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-02-19T08:25:36.892540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:36.892583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-02-19T08:25:36.892652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:36.892686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-02-19T08:25:36.892731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 ProposedWaitParts -> Done 2026-02-19T08:25:36.893460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:36.893531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:36.893565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-02-19T08:25:36.893604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 14 2026-02-19T08:25:36.893641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-02-19T08:25:36.894838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 
LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:36.894898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-02-19T08:25:36.894918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-02-19T08:25:36.894941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-02-19T08:25:36.894963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-02-19T08:25:36.895020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-02-19T08:25:36.897198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-02-19T08:25:36.897280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-02-19T08:25:36.897596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-02-19T08:25:36.897791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:36.897835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:36.897878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:25:36.897910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:36.897946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-02-19T08:25:36.898019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:991:2929] message: TxId: 104 2026-02-19T08:25:36.898063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:25:36.898101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:25:36.898128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:25:36.898227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-02-19T08:25:36.898900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72075186233409546 2026-02-19T08:25:36.898935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-02-19T08:25:36.899989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-02-19T08:25:36.900105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-02-19T08:25:36.900982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-02-19T08:25:36.901025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:849:2800], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-02-19T08:25:36.901458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:25:36.901516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2020:3927] 2026-02-19T08:25:36.901968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-02-19T08:25:36.904863Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-02-19T08:25:36.905069Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 236us result status StatusSuccess 2026-02-19T08:25:36.905483Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" 
} PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> Initializer::Simple >> LocalPartition::WithoutPartitionWithSplit [GOOD] >> TxUsage::ReadRuleGeneration |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> TraverseDatashard::TraverseTwoTablesServerless >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes [GOOD] >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> TraverseColumnShard::TraverseColumnTable >> AnalyzeColumnshard::AnalyzeRebootSa |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs >> AnalyzeDatashard::AnalyzeTwoTables |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DDC97004100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:26.312935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:26.313045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:26.313124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:26.313173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:26.313219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:26.313260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:26.313293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:26.313323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:26.313389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:26.313490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:26.314403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:26.314748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:26.405621Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:26.405702Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:26.418859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:26.419603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:26.419798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:26.431826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:26.432590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:26.433393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:26.433732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:26.437162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:26.437364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:26.438685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:26.438762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:26.438905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:25:26.438953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:26.438998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:26.439187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:26.588152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.591955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592133Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.592991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.593094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:26.593179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETo ... 
Update, schemeshard 0x00007DDC97001100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC97001100, stats written 0 ... TEST waitForFullStatsUpdate, schemeshard 0x00007DDC97001100, stats written 0 2026-02-19T08:25:40.401477Z node 2
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC97001100, stats written 0 2026-02-19T08:25:40.401677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-02-19T08:25:40.401754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-02-19T08:25:40.401822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:466: OLAP store contains 1 tables. 2026-02-19T08:25:40.401868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:488: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DDC97001100, stats written 1 2026-02-19T08:25:40.402416Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:40.402658Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 273us result status StatusSuccess 2026-02-19T08:25:40.403276Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 165 LastUpdateTime: 165 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 166912 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 
10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_27_Table [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline >> TraverseDatashard::TraverseOneTable >> TxUsage::WriteToTopic_Demo_27_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] TEST create schemeshard, 0x00007DFF91204100 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:25:30.885104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:25:30.885177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:30.885224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:25:30.885255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:25:30.885294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:25:30.885333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:25:30.885363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:25:30.885391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:25:30.885461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:25:30.885548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:25:30.886421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:25:30.886762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:25:30.952149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:25:30.952204Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:30.962103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:25:30.962741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:25:30.962921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:25:30.973573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:25:30.974505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:25:30.975275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:25:30.975613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:25:30.979204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:30.979417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:25:30.980759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:25:30.980824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:25:30.980942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-02-19T08:25:30.980989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:25:30.981055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:25:30.981255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:25:31.100834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.101550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.101632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.101693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.101797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.101854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.101909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.101972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.102020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.102079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.102137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.102182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.102230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:25:31.102316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETo ... schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007DFF911FF900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 2026-02-19T08:25:41.718851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 0 2026-02-19T08:25:41.719010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-02-19T08:25:41.719064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-02-19T08:25:41.719120Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:497: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-02-19T08:25:41.719163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:507: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DFF911FF900, stats written 1 2026-02-19T08:25:41.719638Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:25:41.719905Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 301us result status StatusSuccess 2026-02-19T08:25:41.720377Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 150 LastUpdateTime: 150 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 166912 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } IsRestore: false } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> TraverseColumnShard::TraverseServerlessColumnTable >> AnalyzeColumnshard::AnalyzeStatus >> AnalyzeColumnshard::AnalyzeShard |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TraverseDatashard::TraverseOneTableServerless >> IncrementalBackup::IncrementalBackupMultipleIndexes [GOOD] >> IncrementalBackup::IndexDataVerificationIncrementalRestore >> AnalyzeDatashard::DropTableNavigateError >> AnalyzeColumnshard::Analyze |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BasicUsage::ConflictingWrites [GOOD] >> BasicUsage::AutoPartitioning_KeyedWriteSession >> TraverseDatashard::TraverseTwoTables >> TxUsage::WriteToTopic_Demo_46_Table [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query [GOOD] >> TxUsage::WriteToTopic_Demo_46_Query >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> BasicUsage::KeyedWriteSession_BoundPartitionChooser_SplitPartition_MultiThreadedAcksOrder [GOOD] >> BasicUsage::SimpleBlockingKeyedWriteSession_BasicWrite >> TxUsage::ReadRuleGeneration [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.619438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.728204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.734024Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.734371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.734539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.137197Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.232004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.232110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.290067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.386735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.023804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.024596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.024641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.024670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.025091Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.088121Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.629331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:46.327632Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.333923Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:46.337524Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.367354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:46.367468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.415444Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:46.417042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.576844Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.576936Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.578215Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.578811Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.579311Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.580203Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.580473Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.580559Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.580667Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.580797Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.580970Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.595484Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.763148Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.799643Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:46.799728Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:46.830078Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:46.830913Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:46.831133Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:46.831193Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:46.831234Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:46.831290Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:46.831338Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:46.831403Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:46.832107Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:46.850323Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.850436Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2141:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.876934Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2213:2647] 2026-02-19T08:25:46.877156Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2213:2647], schemeshard id = 72075186224037897 2026-02-19T08:25:46.929221Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2268:2671] 2026-02-19T08:25:46.930939Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-02-19T08:25:46.939400Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. Describe result: PathErrorUnknown 2026-02-19T08:25:46.939445Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. Creating table 2026-02-19T08:25:46.939500Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-02-19T08:25:46.944600Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2325:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:46.948540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:46.956139Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:46.956238Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.012034Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:47.052371Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-02-19T08:25:47.217136Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:47.484506Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:47.600854Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:47.600971Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2274:2675] Owner: [2:2273:2674]. Column diff is empty, finishing 2026-02-19T08:25:48.200521Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... 5:50.139460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.161250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:50.481385Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3740:3633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.481524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.538552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3744:3636], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.538647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.539700Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3747:3639]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:50.539882Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:50.540047Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-02-19T08:25:50.540103Z node 1 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [1:3750:3642] 2026-02-19T08:25:50.540167Z node 1 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [1:3750:3642] 2026-02-19T08:25:50.540808Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3751:3553] 2026-02-19T08:25:50.541002Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:3750:3642], server id = [2:3751:3553], tablet id = 72075186224037894, status = OK 2026-02-19T08:25:50.541234Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3751:3553], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:25:50.541294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:25:50.541536Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:25:50.541599Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [1:3747:3639], StatRequests.size() = 1 2026-02-19T08:25:50.558107Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:25:50.558690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3755:3646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.558796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.559364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3759:3650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.559446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.559526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3762:3653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:50.565765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:25:50.669756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:25:50.669837Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:25:50.701889Z node 1 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [1:3750:3642], schemeshard count = 1 2026-02-19T08:25:50.962089Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3764:3655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:25:51.251510Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:3877:3722] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:25:51.261016Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3900:3738]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:51.261194Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:51.261226Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [1:3900:3738], StatRequests.size() = 1 2026-02-19T08:25:51.380727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:51.758732Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4237:3807]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:51.758911Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:25:51.758950Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [1:4237:3807], StatRequests.size() = 1 2026-02-19T08:25:51.780375Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4246:3816]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:51.780533Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:25:51.780560Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 4, ReplyToActorId = [1:4246:3816], StatRequests.size() = 1 2026-02-19T08:25:51.935809Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4287:3803]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:51.937947Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:51.937998Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:51.938444Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:51.938474Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ] 2026-02-19T08:25:51.938520Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:51.945605Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:51.950218Z node 2 :STATISTICS WARN: 
database.cpp:263: [ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-02-19T08:25:51.950483Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-02-19T08:25:51.950653Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 2026-02-19T08:25:51.950719Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 2026-02-19T08:25:51.950933Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4315:3819]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:51.952934Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:51.952983Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:51.953326Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:51.953354Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ] 2026-02-19T08:25:51.953398Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:51.953623Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:51.955543Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-02-19T08:25:51.955959Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 2 2026-02-19T08:25:51.956070Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-02-19T08:25:51.956192Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 2 >> TraverseDatashard::TraverseOneTable [GOOD] >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:44.987060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:45.076798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:45.081647Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:45.082015Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:45.082173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:45.420216Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:45.508239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:45.508379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:45.567924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:45.677653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:46.341850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:46.342403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:46.342450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:46.342483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:46.343097Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:46.410415Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:46.947288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:49.627937Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:49.633621Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:49.636381Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:49.658951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:49.659060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:49.707549Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:49.709186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:49.868475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:49.868599Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:49.870176Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.870906Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.871615Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.872411Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.872784Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.872916Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.873139Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.873277Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.873434Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.888640Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:50.073410Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:50.126741Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:50.126847Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:50.161965Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:50.162133Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:50.162313Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:50.162363Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:50.162406Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:50.162452Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:50.162489Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:50.162532Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:50.162926Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:50.166980Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:50.167080Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:50.191293Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:50.192538Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:50.258198Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2263:2669] 2026-02-19T08:25:50.260053Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:50.270425Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:50.270501Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:50.270572Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:50.276390Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:50.279787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:50.286162Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:50.286297Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:50.297392Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:50.499478Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:50.512181Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:50.829553Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:50.969411Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:50.969491Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:51.715622Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:51.904057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2752:3376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.904173Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.904510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2770:3381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.904569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.919280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:52.398106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3057:3429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.398469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.399007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3061:3432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.399128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.400184Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3064:3435]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:52.400421Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:52.400509Z node 1 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [1:3066:3437] 2026-02-19T08:25:52.400587Z node 1 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [1:3066:3437] 2026-02-19T08:25:52.401134Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3067:3187] 2026-02-19T08:25:52.401423Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:3066:3437], server id = [2:3067:3187], tablet id = 72075186224037894, status = OK 2026-02-19T08:25:52.401628Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3067:3187], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:25:52.401733Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:25:52.401977Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:25:52.402043Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [1:3064:3435], StatRequests.size() = 1 2026-02-19T08:25:52.415247Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:25:52.415566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3071:3441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.415717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.416061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3075:3445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.416123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.416238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3078:3448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.420990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:25:52.549800Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:25:52.549872Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:25:52.623674Z node 1 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [1:3066:3437], schemeshard count = 1 2026-02-19T08:25:52.929692Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3080:3450], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-02-19T08:25:53.133400Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:3184:3517] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:25:53.144221Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3207:3533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:53.144354Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:53.144410Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [1:3207:3533], StatRequests.size() = 1 2026-02-19T08:25:53.262987Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3255:3229]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:53.264902Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:53.264946Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:53.265285Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:53.265310Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ] 2026-02-19T08:25:53.265353Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:53.273922Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:53.279088Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-02-19T08:25:53.279396Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-02-19T08:25:53.279583Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 2026-02-19T08:25:53.279653Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:25:42.231095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:42.330044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:42.334380Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:42.334644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:42.334751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.629631Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.723621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.723729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.781071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.890570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.518439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.518984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.519030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.519058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.519580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.584021Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:44.098568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:47.182319Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:47.187989Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:47.190681Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:47.213612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:47.213731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.262381Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:47.263880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.418447Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:47.418522Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.419555Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.420078Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.420542Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.421107Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.421310Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.421391Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.421555Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.421647Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.421827Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.436616Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.633390Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:47.674252Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:47.674331Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:47.710678Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:47.710904Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:47.711125Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:47.711189Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:47.711248Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:47.711300Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:47.711370Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:47.711431Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:47.711944Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:47.717535Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.717645Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.744360Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:47.745483Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:47.819668Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:47.823251Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-02-19T08:25:47.836652Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.836726Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:47.836817Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-02-19T08:25:47.843691Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.848536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.869848Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.870019Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.886002Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:48.111964Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-02-19T08:25:48.173021Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:48.463990Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:48.584682Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:48.584756Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:49.176551Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... tion_create_table.cpp:689) 2026-02-19T08:25:52.922737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4420:3836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.922925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.923759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4424:3839], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.923856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.925612Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:4427:3842]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:52.925922Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:52.926214Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-02-19T08:25:52.926287Z node 1 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [1:4430:3845] 2026-02-19T08:25:52.926349Z node 1 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [1:4430:3845] 2026-02-19T08:25:52.927127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4431:3924] 2026-02-19T08:25:52.927607Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:4430:3845], server id = [2:4431:3924], tablet id = 72075186224037894, status = OK 2026-02-19T08:25:52.927893Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4431:3924], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:25:52.927973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:25:52.928278Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:25:52.928366Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [1:4427:3842], StatRequests.size() = 1 2026-02-19T08:25:52.943065Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:25:52.945009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4435:3849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.945089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.945535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4439:3853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.945596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.945686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4442:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:52.950604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:25:53.119508Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:25:53.119573Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:25:53.196634Z node 1 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [1:4430:3845], schemeshard count = 1 2026-02-19T08:25:53.403083Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:4444:3858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2026-02-19T08:25:53.552434Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:4558:3929] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:25:53.562396Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:4581:3945]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:53.562610Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:53.562656Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [1:4581:3945], StatRequests.size() = 1 2026-02-19T08:25:53.701911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:54.028219Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4925:4009]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:54.028373Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:25:54.028659Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2026-02-19T08:25:54.028698Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:25:54.028928Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:25:54.028981Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [1:4925:4009], StatRequests.size() = 1 2026-02-19T08:25:54.041031Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:25:54.048263Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4934:4018]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:54.048420Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:25:54.048447Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 4, ReplyToActorId = [1:4934:4018], StatRequests.size() = 1 2026-02-19T08:25:54.138389Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4977:4176]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:54.141036Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:54.141080Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:54.141398Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 
] 2026-02-19T08:25:54.141424Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ] 2026-02-19T08:25:54.141473Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:54.182908Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:54.187920Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-02-19T08:25:54.188167Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-02-19T08:25:54.188292Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 2026-02-19T08:25:54.188563Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 2026-02-19T08:25:54.188834Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5005:4192]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:54.191302Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:54.191344Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:54.191556Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:54.191579Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ] 2026-02-19T08:25:54.191619Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:54.191780Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:54.193538Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-02-19T08:25:54.193914Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 2 2026-02-19T08:25:54.194354Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-02-19T08:25:54.194521Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 2 >> AnalyzeColumnshard::AnalyzeShard [GOOD] >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate >> TraverseDatashard::TraverseOneTableServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeShard [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:25:46.124259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.200277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.206069Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:46.206303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:46.206446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:46.534156Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.618196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.618324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.675461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.772213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:47.385171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:47.385554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:47.385593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:47.385619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:47.386133Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:47.447673Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:47.968494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:50.921202Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:50.928609Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:50.932063Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:50.958415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:50.958517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:51.006801Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:51.008604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:51.170398Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:51.170524Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:51.172055Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.172674Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.173318Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.174024Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.174423Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.174559Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.174793Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.174978Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.175228Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:51.190227Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:51.349448Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:51.388546Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:51.388640Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:51.423471Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:51.423685Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:51.423909Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:51.423978Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:51.424040Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:51.424099Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:51.424193Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:51.424248Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:51.424753Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:51.430337Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:51.430439Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:51.453059Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:51.454218Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:51.517943Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2263:2669] 2026-02-19T08:25:51.520188Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:51.531357Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:51.531442Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:51.531523Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:51.537302Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:51.542201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:51.548426Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:51.548559Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:51.561442Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:51.783778Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:51.861176Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:52.092080Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:52.220317Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:52.220420Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:52.985158Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:53.204958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2752:3376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:53.205124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:53.205619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2770:3381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:53.205724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:53.226605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:25:53.353486Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:25:53.353822Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:25:53.354142Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:25:53.354289Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:25:53.354409Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:25:53.354566Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:25:53.354773Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:25:53.354905Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:25:53.355027Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:25:53.355158Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:25:53.355282Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:25:53.355401Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:25:53.355561Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:25:53.388859Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:25:53.388975Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:25:53.389126Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:25:53.389175Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:25:53.389431Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:25:53.389500Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:25:53.389638Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:25:53.389700Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:25:53.389800Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:25:53.389845Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:25:53.389903Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:25:53.389949Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:25:53.390180Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:25:53.390235Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:25:53.390403Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:25:53.390461Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:25:53.390524Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:25:53.390568Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:25:53.390621Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:25:53.390661Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:25:53.390833Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:25:53.390884Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-02-19T08:25:53.391005Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-02-19T08:25:53.391059Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-02-19T08:25:53.464896Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2824:3027];ev=NActors::IEventHandle;tablet_id=72075186224037899;tx_id=281474976715659;this=136311403570432;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=2970;max=18446744073709551615;plan=0;src=[2:1918:2458];cookie=121:2;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-02-19T08:25:53.500724Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-02-19T08:25:53.500834Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-02-19T08:25:53.500874Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000013s >> AnalyzeColumnshard::AnalyzeEmptyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:23:52.176741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:52.293704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:52.313128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:52.313217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:52.313816Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:52.633691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:52.633845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:52.724196Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489428933989 != 1771489428933993 2026-02-19T08:23:52.734094Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:52.782878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:52.885652Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:53.191916Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:837:2731], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:53.192002Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:53.192040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:23:53.192116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [1:835:2729], Recipient [1:404:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-02-19T08:23:53.192145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:23:53.339535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-02-19T08:23:53.339810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:53.340034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:23:53.340083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:23:53.340335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:23:53.340447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:53.340569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:53.341464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:23:53.341638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:23:53.341683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:53.341744Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:53.341932Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:53.341975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:53.342057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:53.342122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:23:53.342182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:23:53.342223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:23:53.342341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:53.343327Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:53.343374Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:53.343509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:53.343544Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:53.343604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:53.343690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:23:53.343774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:23:53.343855Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects 
ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:53.344248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:53.344299Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:53.344392Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:53.344444Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:53.344499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:53.344549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:53.344622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:23:53.344655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:53.344693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:23:53.359358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:23:53.360153Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:53.360234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:23:53.360418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:23:53.360729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269091328, Sender [1:400:2399], Recipient [1:404:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-02-19T08:23:53.361124Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:843:2737], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:53.361177Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:53.361225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:23:53.361429Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [1:835:2729], Recipient [1:404:2403]: NKikimrScheme.TEvNotifyTxCompletion 
TxId: 1 2026-02-19T08:23:53.361468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:23:53.361529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight ... [10:407:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:25:54.722895Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:25:54.722909Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:25:54.723016Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [10:3823:4806], Recipient [10:407:2405]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037918 TableLocalId: 89 Generation: 1 Round: 0 TableStats { DataSize: 658 RowCount: 3 IndexSize: 0 InMemSize: 576 LastAccessTime: 0 LastUpdateTime: 11975 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 82 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82712 } ShardState: 2 UserTablePartOwners: 72075186224037918 UserTablePartOwners: 72075186224037888 NodeId: 10 StartTime: 10950 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:25:54.723037Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:25:54.723057Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037918 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 89] state 'Ready' dataSize 658 rowCount 3 cpuUsage 0 2026-02-19T08:25:54.723118Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037918 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 89] raw table stats: DataSize: 658 RowCount: 3 IndexSize: 0 InMemSize: 576 LastAccessTime: 0 LastUpdateTime: 11975 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 82 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:25:54.723156Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [10:4246:5188], Recipient [10:407:2405]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:25:54.723174Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:25:54.723194Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:25:54.723313Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender 
[10:3825:4808], Recipient [10:407:2405]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037919 TableLocalId: 92 Generation: 1 Round: 0 TableStats { DataSize: 658 RowCount: 3 IndexSize: 0 InMemSize: 576 LastAccessTime: 0 LastUpdateTime: 11975 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 82 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82712 } ShardState: 2 UserTablePartOwners: 72075186224037919 UserTablePartOwners: 72075186224037891 NodeId: 10 StartTime: 10950 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:25:54.723335Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:25:54.723356Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037919 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 92] state 'Ready' dataSize 658 rowCount 3 cpuUsage 0 2026-02-19T08:25:54.723429Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037919 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 92] raw table stats: DataSize: 658 RowCount: 3 IndexSize: 0 InMemSize: 576 LastAccessTime: 0 LastUpdateTime: 11975 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 82 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:25:54.767750Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [10:407:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:25:54.767812Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:25:54.767852Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 4 2026-02-19T08:25:54.767929Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 4 2026-02-19T08:25:54.767961Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 4 2026-02-19T08:25:54.768069Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 90 shard idx 72057594046644480:31 data size 666 row count 3 2026-02-19T08:25:54.768116Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037916 maps to shardIdx: 72057594046644480:31 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 90], pathId map=Table2, is column=0, is olap=0, RowCount 3, DataSize 666, with borrowed parts 2026-02-19T08:25:54.768140Z node 10 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037916, followerId 0 2026-02-19T08:25:54.768218Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037916: SplitByLoadNotEnabledForTable 2026-02-19T08:25:54.768267Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 87 shard idx 72057594046644480:29 data size 666 row count 3 2026-02-19T08:25:54.768295Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037917 maps to shardIdx: 72057594046644480:29 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 87], pathId map=Table1, is column=0, is olap=0, RowCount 3, DataSize 666, with borrowed parts 2026-02-19T08:25:54.768310Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037917, followerId 0 2026-02-19T08:25:54.768343Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037917: SplitByLoadNotEnabledForTable 2026-02-19T08:25:54.768370Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 89 shard idx 72057594046644480:30 data size 658 row count 3 2026-02-19T08:25:54.768405Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037918 maps to shardIdx: 72057594046644480:30 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 89], pathId map=indexImplTable, is column=0, is olap=0, RowCount 3, DataSize 658, with borrowed parts 2026-02-19T08:25:54.768421Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037918, followerId 0 2026-02-19T08:25:54.768462Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037918: SplitByLoadNotEnabledForTable 2026-02-19T08:25:54.768482Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 92 shard idx 72057594046644480:32 data size 658 row count 3 2026-02-19T08:25:54.768510Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037919 maps to shardIdx: 72057594046644480:32 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 92], pathId map=indexImplTable, is column=0, is olap=0, RowCount 3, DataSize 658, with borrowed parts 2026-02-19T08:25:54.768529Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037919, followerId 0 2026-02-19T08:25:54.768580Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037919: SplitByLoadNotEnabledForTable 2026-02-19T08:25:54.768625Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:25:54.779272Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [10:407:2405]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:25:54.779342Z node 10 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:25:54.779378Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-02-19T08:25:54.841400Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:407:2405]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:25:54.841462Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:25:54.841526Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [10:407:2405], Recipient [10:407:2405]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:25:54.841549Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 300 } } { items { uint32_value: 1 } items { uint32_value: 1000 } }, { items { uint32_value: 2 } items { uint32_value: 2000 } }, { items { uint32_value: 3 } items { uint32_value: 3000 } } { items { uint32_value: 3 } } { items { uint32_value: 3 } } { items { uint64_value: 3 } } { items { uint64_value: 3 } } { items { uint32_value: 300 } items { uint32_value: 3 } } { items { uint32_value: 3000 } items { uint32_value: 3 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:47.680449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:47.772730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:47.778723Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:47.779036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:47.779163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:48.142423Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:48.240193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:48.240275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:48.297308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:48.393262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:49.045091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:49.045565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:49.045600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:49.045622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:49.046052Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:49.108487Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:49.649025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:52.166726Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:52.172679Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:52.175496Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:52.198652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:52.198752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:52.246464Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:52.247935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:52.405938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:52.406042Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:52.407529Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.408279Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.409027Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.409897Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.410257Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.410390Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.410620Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.410791Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.410963Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.426352Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:52.593893Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:52.629457Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:52.629541Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:52.658239Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:52.658413Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:52.658569Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:52.658607Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:52.658664Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:52.658705Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:52.658742Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:52.658781Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:52.659158Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:52.662992Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:52.663076Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:52.682649Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:52.683562Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:52.734033Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:52.736485Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-02-19T08:25:52.745146Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:52.745188Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:52.745247Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-02-19T08:25:52.749599Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:52.752821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:52.758509Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:52.758607Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:52.769150Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:52.973658Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-02-19T08:25:53.013447Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:53.288772Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:53.410028Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:53.410127Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:53.996450Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:54.023662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:54.592048Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:54.650914Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8489: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2026-02-19T08:25:54.650991Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8505: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2026-02-19T08:25:54.651087Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:3084:3129], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2026-02-19T08:25:54.653105Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3085:3130] 2026-02-19T08:25:54.655009Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3085:3130], schemeshard id = 72075186224037899 2026-02-19T08:25:55.736913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3445:3585], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:55.737062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:55.737426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3463:3590], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:55.737512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:55.750430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:56.018701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3741:3634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.067430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.068141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3745:3637], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.068263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.069375Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3748:3640]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:56.069500Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:56.069659Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-02-19T08:25:56.069746Z node 1 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [1:3751:3643] 2026-02-19T08:25:56.069799Z node 1 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [1:3751:3643] 2026-02-19T08:25:56.070297Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3752:3554] 2026-02-19T08:25:56.070465Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:3751:3643], server id = [2:3752:3554], tablet id = 72075186224037894, status = OK 2026-02-19T08:25:56.070702Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3752:3554], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:25:56.070756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:25:56.070960Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:25:56.071007Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [1:3748:3640], StatRequests.size() = 1 2026-02-19T08:25:56.085417Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:25:56.086265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3756:3647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.086398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.086962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3760:3651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.087028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.087112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3763:3654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.092700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:25:56.183282Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:25:56.183357Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:25:56.204448Z node 1 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [1:3751:3643], schemeshard count = 1 2026-02-19T08:25:56.405500Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3765:3656], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:25:56.613239Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:3876:3721] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:25:56.626088Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3899:3737]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:56.626364Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:56.626404Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [1:3899:3737], StatRequests.size() = 1 2026-02-19T08:25:56.777313Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3940:3600]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:56.780284Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:56.780350Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:56.780707Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:56.780746Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ] 2026-02-19T08:25:56.780797Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:56.808466Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:56.814725Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-02-19T08:25:56.815053Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-02-19T08:25:56.815184Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 2026-02-19T08:25:56.815366Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:25:50.254876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:50.329352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:50.333834Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:50.334104Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:50.334264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:50.630085Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:50.715935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:50.716066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:50.772982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:50.876603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:51.492597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:51.493106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:51.493150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:51.493180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:51.493757Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:51.558597Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:52.081214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:54.527508Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:54.533152Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:54.535740Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:54.556856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:54.556948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:54.605147Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:54.607069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:54.758647Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:54.758716Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:54.759695Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.760180Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.760632Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.761172Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.761385Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.761457Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.761596Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.761731Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.761941Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:54.776581Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:54.952998Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:54.987944Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:54.988032Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:55.014058Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:55.014216Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:55.014365Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:55.014405Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:55.014448Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:55.014487Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:55.014529Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:55.014571Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:55.014900Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:55.018403Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:55.018489Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:55.037148Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:55.037943Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:55.087066Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:55.089091Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:55.097476Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:55.097537Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:55.097617Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:55.101821Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:55.104613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:55.109947Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:55.110041Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:55.119000Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:55.300260Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:55.360644Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:55.580576Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:55.709428Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:55.709510Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:56.385009Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... led to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.603176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:56.615642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:57.093132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3061:3432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.093452Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.094023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3065:3435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.094131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.095266Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3068:3438]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:57.095438Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:57.095503Z node 1 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [1:3070:3440] 2026-02-19T08:25:57.095555Z node 1 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [1:3070:3440] 2026-02-19T08:25:57.096105Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3071:3188] 2026-02-19T08:25:57.096395Z node 1 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 1, client id = [1:3070:3440], server id = [2:3071:3188], tablet id = 72075186224037894, status = OK 2026-02-19T08:25:57.096611Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3071:3188], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:25:57.096673Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-02-19T08:25:57.096901Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-02-19T08:25:57.096965Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [1:3068:3438], StatRequests.size() = 1 2026-02-19T08:25:57.114806Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:25:57.115333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3075:3444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.115487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.116509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3079:3448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.116629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3082:3451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.116695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:57.121289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:25:57.259780Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:25:57.259852Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:25:57.322565Z node 1 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [1:3070:3440], schemeshard count = 1 2026-02-19T08:25:57.637599Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3084:3453], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-02-19T08:25:57.775964Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:3187:3518] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:25:57.785102Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3210:3534]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:57.785315Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:57.785346Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [1:3210:3534], StatRequests.size() = 1 2026-02-19T08:25:57.934678Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:58.235950Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3556:3601]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:58.236125Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:25:58.236158Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [1:3556:3601], StatRequests.size() = 1 2026-02-19T08:25:58.256036Z node 1 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3565:3610]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:25:58.256154Z node 1 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:25:58.256177Z node 1 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 4, ReplyToActorId = [1:3565:3610], StatRequests.size() = 1 2026-02-19T08:25:58.368384Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3614:3448]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:58.371333Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:58.371399Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:58.371678Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:25:58.371705Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ] 2026-02-19T08:25:58.371738Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:58.378338Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:58.381646Z node 2 :STATISTICS WARN: 
database.cpp:263: [ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-02-19T08:25:58.381924Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-02-19T08:25:58.382112Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 2026-02-19T08:25:58.382198Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 1 2026-02-19T08:25:58.382828Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3642:3464]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:25:58.384782Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:58.384822Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:25:58.385051Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:25:58.385073Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ] 2026-02-19T08:25:58.385106Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:25:58.385266Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:25:58.386906Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-02-19T08:25:58.387474Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 2 2026-02-19T08:25:58.387600Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-02-19T08:25:58.387701Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 2 |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] >> AnalyzeColumnshard::AnalyzeCancel >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::TrickyTableAndColumnNames ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:23:51.128256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:51.292487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:51.315943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:51.316041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:51.316755Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:51.646639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:51.646754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:51.718326Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489427911170 != 1771489427911174 2026-02-19T08:23:51.737559Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:51.787076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:51.880996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:52.212422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:837:2731], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:52.212497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:52.212536Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:23:52.212605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [1:835:2729], Recipient [1:404:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-02-19T08:23:52.212655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:23:52.366690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-02-19T08:23:52.366978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.367199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:23:52.367252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:23:52.367507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:23:52.367650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:52.367756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:52.368488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:23:52.368677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:23:52.368726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:52.368774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:52.369000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.369043Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.369134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.369193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:23:52.369241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:23:52.369287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:23:52.369411Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:52.369956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:52.370035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:52.370192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.370232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.370301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.370351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:23:52.370406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:23:52.370494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects 
ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:52.370893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:52.370936Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:52.371044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.371088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.371136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.371173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.371225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:23:52.371259Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:52.371298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:23:52.384314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:23:52.385023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:52.385089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:23:52.385242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:23:52.385519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269091328, Sender [1:400:2399], Recipient [1:404:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-02-19T08:23:52.385975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:843:2737], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:52.386027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:52.386088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:23:52.386219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [1:835:2729], Recipient [1:404:2403]: NKikimrScheme.TEvNotifyTxCompletion 
TxId: 1 2026-02-19T08:23:52.386258Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:23:52.386329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight ... PartOwners: 72075186224037888 NodeId: 11 StartTime: 10450 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:25:59.338612Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:25:59.338694Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037904 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 71] state 'Ready' dataSize 1168 rowCount 12 cpuUsage 0 2026-02-19T08:25:59.338864Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037904 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 71] raw table stats: DataSize: 1168 RowCount: 12 IndexSize: 0 InMemSize: 1024 LastAccessTime: 0 LastUpdateTime: 11475 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 144 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:25:59.338933Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-02-19T08:25:59.339083Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [11:3023:4335], Recipient [11:405:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:25:59.339105Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:25:59.339121Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:25:59.339264Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [11:2726:4068], Recipient [11:405:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037905 TableLocalId: 69 Generation: 1 Round: 0 TableStats { DataSize: 1192 RowCount: 10 IndexSize: 0 InMemSize: 992 LastAccessTime: 0 LastUpdateTime: 11475 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 200 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82712 } ShardState: 2 UserTablePartOwners: 72075186224037905 UserTablePartOwners: 72075186224037889 NodeId: 11 StartTime: 10450 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:25:59.339286Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:25:59.339309Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037905 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 69] state 'Ready' dataSize 1192 rowCount 10 cpuUsage 0 2026-02-19T08:25:59.339391Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037905 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 69] raw table stats: DataSize: 1192 RowCount: 10 IndexSize: 0 InMemSize: 992 LastAccessTime: 0 LastUpdateTime: 11475 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 2 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 200 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:25:59.339451Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [11:3024:4336], Recipient [11:405:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:25:59.339468Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:25:59.339482Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:25:59.339582Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269553162, Sender [11:2731:4071], Recipient [11:405:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037906 TableLocalId: 69 Generation: 1 Round: 0 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82712 } ShardState: 2 UserTablePartOwners: 72075186224037906 NodeId: 11 StartTime: 10450 TableOwnerId: 72057594046644480 FollowerId: 0 2026-02-19T08:25:59.339601Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5335: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-02-19T08:25:59.339620Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037906 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 69] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-02-19T08:25:59.339678Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:767: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037906 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 69] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:25:59.360395Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:405:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:25:59.360464Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:25:59.360523Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 3 2026-02-19T08:25:59.360610Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 3 2026-02-19T08:25:59.360638Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 3 2026-02-19T08:25:59.360734Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 71 shard idx 72057594046644480:19 data size 1168 row count 12 2026-02-19T08:25:59.360779Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037904 maps to shardIdx: 72057594046644480:19 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 71], pathId map=indexImplTable, is column=0, is olap=0, RowCount 12, DataSize 1168, with borrowed parts 2026-02-19T08:25:59.360800Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037904, followerId 0 2026-02-19T08:25:59.360893Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037904: SplitByLoadNotEnabledForTable 2026-02-19T08:25:59.360940Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 69 shard idx 72057594046644480:17 data size 1192 row count 10 2026-02-19T08:25:59.360991Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037905 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 69], pathId map=DataVerifyTable, is column=0, is olap=0, RowCount 10, DataSize 1192, with borrowed parts 2026-02-19T08:25:59.361027Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037905, followerId 0 2026-02-19T08:25:59.361085Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037905: SplitByLoadNotEnabledForTable 2026-02-19T08:25:59.361120Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 69 shard idx 72057594046644480:18 data size 0 row count 0 2026-02-19T08:25:59.361175Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037906 maps to shardIdx: 72057594046644480:18 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 69], pathId map=DataVerifyTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:25:59.361198Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037906, followerId 0 2026-02-19T08:25:59.361244Z node 11 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:18 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:25:59.361282Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037906: SplitByLoadNotEnabledForTable 2026-02-19T08:25:59.361335Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:25:59.371846Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:405:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:25:59.371899Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:25:59.371921Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 { items { uint32_value: 2 } items { text_value: "Bob" } items { uint32_value: 31 } }, { items { uint32_value: 12 } items { text_value: "David" } items { uint32_value: 41 } }, { items { uint32_value: 13 } items { text_value: "Frank" } items { uint32_value: 45 } } { items { uint32_value: 28 } items { uint32_value: 3 } }, { items { uint32_value: 31 } items { uint32_value: 2 } }, { items { uint32_value: 41 } items { uint32_value: 12 } }, { items { uint32_value: 45 } items { uint32_value: 13 } } { items { uint64_value: 4 } } >> TxUsage::WriteToTopic_Demo_46_Query [GOOD] >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> KqpLimits::TooBigKey+useSink [GOOD] >> KqpLimits::TooBigKey-useSink >> KqpBatchUpdate::TableWithFullTextIndex >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution >> TxUsage::WriteToTopic_Demo_47_Table >> KqpBatchDelete::SimpleOnePartition >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> BasicUsage::SimpleBlockingKeyedWriteSession_BasicWrite [GOOD] >> BasicUsage::KeyedWriteSession_CloseTimeout >> TxUsage::WriteToTopic_Demo_27_Query [GOOD] >> TxUsage::WriteToTopic_Demo_38_Table >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:23:24.612401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:23:24.612485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:24.612564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:23:24.612607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:23:24.612659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:23:24.612705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:23:24.612732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:23:24.612768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:23:24.612849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:23:24.612913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:23:24.613774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:23:24.614151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:23:24.708232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:24.708296Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:24.724238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:23:24.725053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:23:24.725289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:23:24.740505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:23:24.741737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:23:24.742542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:23:24.742874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:23:24.746509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:24.746709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:23:24.747972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:23:24.748063Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:23:24.748182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:23:24.748233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:23:24.748274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:23:24.748445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:23:24.911825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.912745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.912899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.912989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } 
} TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.913626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:23:24.914249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 51615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:26:04.914179Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:26:04.914203Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:26:04.915111Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:26:04.915176Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:26:04.915203Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:26:04.915231Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-02-19T08:26:04.915258Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-02-19T08:26:04.915346Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2026-02-19T08:26:04.916343Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:26:04.916383Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:26:04.916616Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:26:04.916716Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2026-02-19T08:26:04.916743Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-02-19T08:26:04.916782Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2026-02-19T08:26:04.916806Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-02-19T08:26:04.916842Z node 20 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2026-02-19T08:26:04.916919Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [20:786:2752] message: TxId: 103 2026-02-19T08:26:04.916990Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-02-19T08:26:04.917058Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:26:04.917114Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:26:04.917235Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:26:04.917292Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2026-02-19T08:26:04.917311Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:1 2026-02-19T08:26:04.917337Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:26:04.917358Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2026-02-19T08:26:04.917377Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:2 2026-02-19T08:26:04.917409Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:26:04.917434Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:3 2026-02-19T08:26:04.917453Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:3 2026-02-19T08:26:04.917488Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:26:04.917516Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:4 2026-02-19T08:26:04.917541Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:4 2026-02-19T08:26:04.917592Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-02-19T08:26:04.918485Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:26:04.918566Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-02-19T08:26:04.918675Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 
2026-02-19T08:26:04.918738Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-02-19T08:26:04.918772Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:26:04.919023Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:26:04.919293Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:26:04.920199Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:26:04.920242Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:26:04.920320Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:26:04.921972Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:26:04.922232Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:26:04.922303Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [20:1130:3030] 2026-02-19T08:26:04.923252Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2026-02-19T08:26:04.923846Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:26:04.924175Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 385us result status StatusPathDoesNotExist 2026-02-19T08:26:04.924377Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:26:04.924915Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:26:04.925199Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 316us result status StatusPathDoesNotExist 2026-02-19T08:26:04.925357Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query >> KqpBatchUpdate::TableWithFullTextIndex [GOOD] |93.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 6232056113082325209 Reassign# 6 -- VSlotId { NodeId: 7 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 6 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 7 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } Status: "READY" Ready: true Put# [1:1:1:0:0:41:0] Put# [1:1:2:0:0:29:0] Put# [1:1:3:0:0:68:0] 2026-02-19T08:23:03.048692Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:23:03.051273Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6075875313894234839] 2026-02-19T08:23:03.063519Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:1:0:0:41:2] 2026-02-19T08:23:03.063617Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:2:0:0:29:3] 2026-02-19T08:23:03.063656Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:3:0:0:68:4] 2026-02-19T08:23:03.063991Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 3 PartsResurrected# 3 Put# [1:1:4:0:0:8:0] Put# [1:1:5:0:0:99:0] Put# [1:1:6:0:0:50:0] Put# [1:1:7:0:0:87:0] Put# [1:1:8:0:0:63:0] Put# [1:1:9:0:0:6:0] Put# [1:1:10:0:0:51:0] Put# [1:1:11:0:0:62:0] Put# [1:1:12:0:0:38:0] Put# [1:1:13:0:0:87:0] Put# [1:1:14:0:0:61:0] Put# [1:1:15:0:0:1:0] Put# [1:1:16:0:0:35:0] Put# [1:1:17:0:0:75:0] Put# [1:1:18:0:0:93:0] Put# [1:1:19:0:0:53:0] Put# [1:1:20:0:0:74:0] Put# [1:1:21:0:0:21:0] Put# [1:1:22:0:0:49:0] Put# [1:1:23:0:0:35:0] Put# [1:1:24:0:0:52:0] Put# [1:1:25:0:0:59:0] Put# [1:1:26:0:0:40:0] Put# [1:1:27:0:0:99:0] Put# [1:1:28:0:0:59:0] Put# [1:1:29:0:0:94:0] Put# [1:1:30:0:0:99:0] Put# [1:1:31:0:0:34:0] Put# [1:1:32:0:0:59:0] Put# [1:1:33:0:0:12:0] Put# [1:1:34:0:0:94:0] Put# [1:1:35:0:0:30:0] Put# [1:1:36:0:0:28:0] Put# [1:1:37:0:0:45:0] Put# [1:1:38:0:0:49:0] Put# [1:1:39:0:0:25:0] Put# [1:1:40:0:0:28:0] Put# [1:1:41:0:0:86:0] Put# [1:1:42:0:0:71:0] Put# [1:1:43:0:0:64:0] Put# [1:1:44:0:0:87:0] Put# [1:1:45:0:0:69:0] Put# [1:1:46:0:0:72:0] Put# [1:1:47:0:0:52:0] Put# [1:1:48:0:0:66:0] Put# [1:1:49:0:0:34:0] Put# [1:1:50:0:0:80:0] Put# [1:1:51:0:0:32:0] Put# [1:1:52:0:0:72:0] Put# [1:1:53:0:0:35:0] Put# [1:1:54:0:0:74:0] Put# [1:1:55:0:0:40:0] Put# [1:1:56:0:0:5:0] Put# [1:1:57:0:0:35:0] Put# [1:1:58:0:0:12:0] Put# [1:1:59:0:0:44:0] Put# [1:1:60:0:0:79:0] Put# [1:1:61:0:0:84:0] Put# [1:1:62:0:0:75:0] Put# [1:1:63:0:0:13:0] Put# [1:1:64:0:0:67:0] Put# [1:1:65:0:0:39:0] Put# [1:1:66:0:0:59:0] Put# [1:1:67:0:0:21:0] Put# [1:1:68:0:0:10:0] Put# [1:1:69:0:0:94:0] Put# [1:1:70:0:0:58:0] Put# [1:1:71:0:0:26:0] Put# [1:1:72:0:0:66:0] Put# [1:1:73:0:0:58:0] Put# [1:1:74:0:0:13:0] Put# [1:1:75:0:0:3:0] Put# [1:1:76:0:0:31:0] Put# [1:1:77:0:0:18:0] Put# [1:1:78:0:0:37:0] Put# [1:1:79:0:0:8:0] Put# [1:1:80:0:0:92:0] Put# [1:1:81:0:0:72:0] Put# [1:1:82:0:0:53:0] Put# [1:1:83:0:0:20:0] Put# 
[1:1:84:0:0:54:0] Put# [1:1:85:0:0:63:0] Put# [1:1:86:0:0:12:0] Put# [1:1:87:0:0:73:0] Put# [1:1:88:0:0:35:0] Put# [1:1:89:0:0:17:0] Put# [1:1:90:0:0:93:0] Put# [1:1:91:0:0:51:0] Put# [1:1:92:0:0:41:0] Put# [1:1:93:0:0:26:0] Put# [1:1:94:0:0:57:0] Put# [1:1:95:0:0:43:0] Put# [1:1:96:0:0:8:0] Put# [1:1:97:0:0:60:0] Put# [1:1:98:0:0:93:0] Put# [1:1:99:0:0:78:0] Put# [1:1:100:0:0:8:0] Put# [1:1:101:0:0:24:0] Put# [1:1:102:0:0:93:0] Put# [1:1:103:0:0:34:0] Put# [1:1:104:0:0:31:0] Put# [1:1:105:0:0:47:0] Put# [1:1:106:0:0:97:0] Put# [1:1:107:0:0:16:0] Put# [1:1:108:0:0:5:0] Put# [1:1:109:0:0:77:0] Put# [1:1:110:0:0:75:0] Put# [1:1:111:0:0:78:0] Put# [1:1:112:0:0:71:0] Put# [1:1:113:0:0:80:0] Put# [1:1:114:0:0:50:0] Put# [1:1:115:0:0:40:0] Put# [1:1:116:0:0:21:0] Put# [1:1:117:0:0:63:0] Put# [1:1:118:0:0:93:0] Put# [1:1:119:0:0:10:0] Put# [1:1:120:0:0:50:0] Put# [1:1:121:0:0:72:0] Put# [1:1:122:0:0:100:0] Put# [1:1:123:0:0:68:0] Put# [1:1:124:0:0:1:0] Put# [1:1:125:0:0:37:0] Put# [1:1:126:0:0:57:0] Put# [1:1:127:0:0:28:0] Put# [1:1:128:0:0:38:0] Put# [1:1:129:0:0:94:0] Put# [1:1:130:0:0:73:0] Put# [1:1:131:0:0:43:0] Put# [1:1:132:0:0:80:0] Put# [1:1:133:0:0:32:0] Put# [1:1:134:0:0:29:0] Put# [1:1:135:0:0:52:0] Put# [1:1:136:0:0:35:0] Put# [1:1:137:0:0:90:0] Put# [1:1:138:0:0:30:0] Put# [1:1:139:0:0:38:0] Put# [1:1:140:0:0:38:0] Put# [1:1:141:0:0:5:0] Put# [1:1:142:0:0:92:0] Put# [1:1:143:0:0:63:0] Put# [1:1:144:0:0:59:0] Put# [1:1:145:0:0:85:0] Put# [1:1:146:0:0:77:0] Put# [1:1:147:0:0:82:0] Put# [1:1:148:0:0:34:0] Put# [1:1:149:0:0:74:0] Put# [1:1:150:0:0:97:0] Put# [1:1:151:0:0:23:0] Put# [1:1:152:0:0:76:0] Put# [1:1:153:0:0:50:0] Put# [1:1:154:0:0:7:0] Put# [1:1:155:0:0:11:0] Put# [1:1:156:0:0:40:0] Put# [1:1:157:0:0:89:0] Put# [1:1:158:0:0:39:0] Put# [1:1:159:0:0:72:0] Put# [1:1:160:0:0:88:0] Put# [1:1:161:0:0:84:0] Put# [1:1:162:0:0:70:0] Put# [1:1:163:0:0:25:0] Put# [1:1:164:0:0:31:0] Put# [1:1:165:0:0:18:0] Put# [1:1:166:0:0:79:0] Put# [1:1:167:0:0:78:0] Put# [1:1:168:0:0:24:0] Put# [1:1:169:0:0:83:0] Put# [1:1:170:0:0:19:0] Put# [1:1:171:0:0:96:0] Put# [1:1:172:0:0:23:0] Put# [1:1:173:0:0:23:0] Put# [1:1:174:0:0:7:0] Put# [1:1:175:0:0:92:0] Put# [1:1:176:0:0:48:0] Put# [1:1:177:0:0:23:0] Put# [1:1:178:0:0:82:0] Put# [1:1:179:0:0:23:0] Put# [1:1:180:0:0:94:0] Put# [1:1:181:0:0:38:0] Put# [1:1:182:0:0:83:0] Put# [1:1:183:0:0:82:0] Put# [1:1:184:0:0:84:0] Put# [1:1:185:0:0:69:0] Put# [1:1:186:0:0:12:0] Put# [1:1:187:0:0:32:0] Put# [1:1:188:0:0:25:0] Put# [1:1:189:0:0:44:0] Put# [1:1:190:0:0:4:0] Put# [1:1:191:0:0:10:0] Put# [1:1:192:0:0:34:0] Put# [1:1:193:0:0:48:0] Put# [1:1:194:0:0:62:0] Put# [1:1:195:0:0:91:0] Put# [1:1:196:0:0:54:0] Put# [1:1:197:0:0:19:0] Put# [1:1:198:0:0:93:0] Put# [1:1:199:0:0:24:0] Put# [1:1:200:0:0:12:0] Put# [1:1:201:0:0:94:0] Put# [1:1:202:0:0:51:0] Put# [1:1:203:0:0:27:0] Put# [1:1:204:0:0:14:0] Put# [1:1:205:0:0:27:0] Put# [1:1:206:0:0:33:0] Put# [1:1:207:0:0:11:0] Put# [1:1:208:0:0:33:0] Put# [1:1:209:0:0:80:0] Put# [1:1:210:0:0:10:0] Put# [1:1:211:0:0:61:0] Put# [1:1:212:0:0:99:0] Put# [1:1:213:0:0:35:0] Put# [1:1:214:0:0:24:0] Put# [1:1:215:0:0:51:0] Put# [1:1:216:0:0:81:0] Put# [1:1:217:0:0:81:0] Put# [1:1:218:0:0:48:0] Put# [1:1:219:0:0:40:0] Put# [1:1:220:0:0:42:0] Put# [1:1:221:0:0:4:0] Put# [1:1:222:0:0:24:0] Put# [1:1:223:0:0:50:0] Put# [1:1:224:0:0:18:0] Put# [1:1:225:0:0:24:0] Put# [1:1:226:0:0:11:0] Put# [1:1:227:0:0:12:0] Put# [1:1:228:0:0:14:0] Put# [1:1:229:0:0:40:0] Put# [1:1:230:0:0:97:0] Put# [1:1:231:0:0:44:0] Put# [1:1:232:0:0:19:0] Put# 
[1:1:233:0:0:32:0] Put# [1:1:234:0:0:31:0] Put# [1:1:235:0:0:72:0] Put# [1:1:236:0:0:61:0] Put# [1:1:237:0:0:94:0] Put# [1:1:238:0:0:27:0] Put# [1:1:239:0:0:83:0] Put# [1:1:240:0:0:9:0] Put# [1:1:241:0:0:55:0] Put# [1:1:242:0:0:68:0] Put# [1:1:243:0:0:91:0] Put# [1:1:244:0:0:32:0] Put# [1:1:245:0:0:51:0] Put# [1:1:246:0:0:72:0] Put# [1:1:247:0:0:80:0] Put# [1:1:248:0:0:89:0] Put# [1:1:249:0:0:56:0] Put# [1:1:250:0:0:3:0] Put# [1:1:251:0:0:45:0] Put# [1:1:252:0:0:38:0] Put# [1:1:253:0:0:22:0] Put# [1:1:254:0:0:74:0] Put# [1:1:255:0:0:95:0] Put# [1:1:256:0:0:26:0] Put# [1:1:257:0:0:72:0] Put# [1:1:258:0:0:11:0] Put# [1:1:259:0:0:48:0] Put# [1:1:260:0:0:42:0] Put# [1:1:261:0:0:13:0] Put# [1:1:262:0:0:16:0] Put# [1:1:263:0:0:11:0] Put# [1:1:264:0:0:35:0] Put# [1:1:265:0:0:48:0] Put# [1:1:266:0:0:70:0] Put# [1:1:267:0:0:55:0] Put# [1:1:268:0:0:93:0] Put# [1:1:269:0:0:27:0] Put# [1:1:270:0:0:62:0] Put# [1:1:271:0:0:68:0] Put# [1:1:272:0:0:66:0] Put# [1:1:273:0:0:81:0] Put# [1:1:274:0:0:41:0] Put# [1:1:275:0:0:5:0] Put# [1:1:276:0:0:49:0] Put# [1:1:277:0:0:65:0] Put# [1:1:278:0:0:94:0] Put# [1:1:279:0:0:97:0] Put# [1:1:280:0:0:19:0] Put# [1:1:281:0:0:25:0] Put# [1:1:282:0:0:18:0] Put# [1:1:283:0:0:72:0] Put# [1:1:284:0:0:65:0] Put# [1:1:285:0:0:37:0] Put# [1:1:286:0:0:7:0] Put# [1:1:287:0:0:4:0] Put# [1:1:288:0:0:85:0] Put# [1:1:289:0:0:1:0] Put# [1:1:290:0:0:96:0] Put# [1:1:291:0:0:38:0] Put# [1:1:292:0:0:76:0] Put# [1:1:293:0:0:38:0] Put# [1:1:294:0:0:78:0] Put# [1:1:295:0:0:78:0] Put# [1:1:296:0:0:37:0] Put# [1:1:297:0:0:79:0] Put# [1:1:298:0:0:27:0] Put# [1:1:299:0:0:6:0] Put# [1:1:300:0:0:36:0] Put# [1:1:301:0:0:9:0] Put# [1:1:302:0:0:42:0] Put# [1:1:303:0:0:28:0] Put# [1:1:304:0:0:64:0] Put# [1:1:305:0:0:65:0] Put# [1:1:306:0:0:100:0] Put# [1:1:307:0:0:2:0] Put# [1:1:308:0:0:85:0] Put# [1:1:309:0:0:88:0] Put# [1:1:310:0:0:82:0] Put# [1:1:311:0:0:89:0] Put# [1:1:312:0:0:64:0] Put# [1:1:313:0:0:60:0] Put# [1:1:314:0:0:83:0] Put# [1:1:315:0:0:11:0] Put# [1:1:316:0:0:26:0] Put# [1:1:317:0:0:29:0] Put# [1:1:318:0:0:27:0] Put# [1:1:319:0:0:98:0] Put# [1:1:320:0:0:92:0] Put# [1:1:321:0:0:85:0] Put# [1:1:322:0:0:50:0] Put# [1:1:323:0:0:93:0] Put# [1:1:324:0:0:14:0] Put# [1:1:325:0:0:24:0] Put# [1:1:326:0:0:12:0] Put# [1:1:327:0:0:28:0] Put# [1:1:328:0:0:47:0] Put# [1:1:329:0:0:38:0] Put# [1:1:330:0:0:6:0] Put# [1:1:331:0:0:36:0] Put# [1:1:332:0:0:25:0] Put# [1:1:333:0:0:30:0] Put# [1:1:334:0:0:46:0] Put# [1:1:335:0:0:64:0] Put# [1:1:336:0:0:80:0] Put# [1:1:337:0:0:92:0] Put# [1:1:338:0:0:11:0] Put# [1:1:339:0:0:28:0] Put# [1:1:340:0:0:47:0] Put# [1:1:341:0:0:70:0] Put# [1:1:342:0:0:14:0] Put# [1:1:343:0:0:12:0] Put# [1:1:344:0:0:83:0] Put# [1:1:345:0:0:70:0] Put# [1:1:346:0:0:84:0] Put# [1:1:347:0:0:79:0] Put# [1:1:348:0:0:57:0] Put# [1:1:349:0:0:84:0] Put# [1:1:350:0:0:88:0] Put# [1:1:351:0:0:61:0] Put# [1:1:352:0:0:68:0] Put# [1:1:353:0:0:18:0] Put# [1:1:354:0:0:15:0] Put# [1:1:355:0:0:54:0] Put# [1:1:356:0:0:17:0] Put# [1:1:357:0:0:11:0] Put# [1:1:358:0:0:95:0] Put# [1:1:359:0:0:86:0] Put# [1:1:360:0:0:63:0] Put# [1:1:361:0:0:99:0] Put# [1:1:362:0:0:65:0] Put# [1:1:363:0:0:91:0] Put# [1:1:364:0:0:15:0] Put# [1:1:365:0:0:68:0] Put# [1:1:366:0:0:67:0] Put# [1:1:367:0:0:82:0] Put# [1:1:368:0:0:52:0] Put# [1:1:369:0:0:18:0] Put# [1:1:370:0:0:24:0] Put# [1:1:371:0:0:6:0] Put# [1:1:372:0:0:10:0] Put# [1:1:373:0:0:21:0] Put# [1:1:374:0:0:4:0] Put# [1:1:375:0:0:19:0] Put# [1:1:376:0:0:15:0] Put# [1:1:377:0:0:10:0] Put# [1:1:378:0:0:95:0] Put# [1:1:379:0:0:45:0] Put# [1:1:380:0:0:86:0] Put# 
[1:1:381:0:0:43:0] Put# [1:1:382:0:0:52:0] Put# [1:1:383:0:0:50:0] Put# [1:1:384:0:0:62:0] Put# [1:1:385:0:0:40:0] Put# [1:1:386:0:0:11:0] Put# [1:1:387:0:0:90:0] Put# [1:1:388:0:0:5:0] Put# [1:1:389:0:0:74:0] Put# [1:1:390:0:0:63:0] Put# [1:1:391:0:0:92:0] Put# [1:1:392:0:0:62:0] Put# [1:1:393:0:0:39:0] Put# [1:1:394:0:0:19:0] Put# [1:1:395:0:0:93:0] Put# [1:1:396:0:0:70:0] Put# [1:1:397:0:0:26:0] Put# [1:1:398:0:0:35:0] Put# [1:1:399:0:0:76:0] Put# [1:1:400:0:0:28:0] Put# [1:1:401:0:0:17:0] Put# [1:1:402:0:0:58:0] Put# [1:1:403:0:0:14:0] Put# [1:1:404:0:0:42:0] Put# [1:1:405:0:0:37:0] Put# [1:1:406:0:0:33:0] Put# [1:1:407:0:0:91:0] Put# [1:1:408:0:0:4:0] Put# [1:1:409:0:0:37:0] Put# [1:1:410:0:0:40:0] Put# [1:1:411:0:0:25:0] Put# [1:1:412:0:0:66:0] Put# [1:1:413:0:0:72:0] Put# [1:1:414:0:0:95:0] Put# [1:1:415:0:0:36:0] Put# [1:1:416:0:0:4:0] Put# [1:1:417:0:0:32:0] Put# [1:1:418:0:0:55:0] Put# [1:1:419:0:0:85:0] Put# [1:1:420:0:0:7:0] Put# [1:1:421:0:0:59:0] Put# [1:1:422:0:0:52:0] Put# [1:1:423:0:0:55:0] Put# [1:1:424:0:0:47:0] Put# [1:1:425:0:0:50:0] Put# [1:1:426:0:0:94:0] Put# [1:1:427:0:0:82:0] Put# [1:1:428:0:0:46:0] Put# [1:1:429:0:0:76:0] Put# [1:1:430:0:0:35:0] Put# [1:1:431:0:0:27:0] Put# [1:1:432:0:0:92:0] Put# [1:1:433:0:0:39:0] Put# [1:1:434:0:0:50:0] Put# [1:1:435:0:0:7:0] Put# [1:1:436:0:0:80:0] Put# [1:1:437:0:0:44:0] Put# [1:1:438:0:0:51:0] Put# [1:1:439:0:0:48:0] Put# [1:1:440:0:0:31:0] Put# [1:1:441:0:0:26:0] Put# [1:1:442:0:0:94:0] Put# [1:1:443:0:0:42:0] Put# [1:1:444:0:0:99:0] Put# [1:1:445:0:0:33:0] Put# [1: ... 0:0] Put# [1:4:8580:0:0:35:0] Put# [1:4:8581:0:0:62:0] Put# [1:4:8582:0:0:87:0] Put# [1:4:8583:0:0:82:0] Put# [1:4:8584:0:0:73:0] Put# [1:4:8585:0:0:80:0] Put# [1:4:8586:0:0:34:0] Put# [1:4:8587:0:0:40:0] Put# [1:4:8588:0:0:46:0] Put# [1:4:8589:0:0:49:0] Put# [1:4:8590:0:0:85:0] Put# [1:4:8591:0:0:7:0] Put# [1:4:8592:0:0:80:0] Put# [1:4:8593:0:0:81:0] Put# [1:4:8594:0:0:94:0] Put# [1:4:8595:0:0:41:0] Put# [1:4:8596:0:0:91:0] Put# [1:4:8597:0:0:63:0] Put# [1:4:8598:0:0:16:0] Put# [1:4:8599:0:0:66:0] Put# [1:4:8600:0:0:18:0] Put# [1:4:8601:0:0:100:0] Put# [1:4:8602:0:0:32:0] Put# [1:4:8603:0:0:55:0] Put# [1:4:8604:0:0:61:0] Put# [1:4:8605:0:0:82:0] Put# [1:4:8606:0:0:6:0] Put# [1:4:8607:0:0:91:0] Put# [1:4:8608:0:0:39:0] Put# [1:4:8609:0:0:32:0] Put# [1:4:8610:0:0:43:0] Put# [1:4:8611:0:0:1:0] Put# [1:4:8612:0:0:83:0] Put# [1:4:8613:0:0:86:0] Put# [1:4:8614:0:0:99:0] Put# [1:4:8615:0:0:74:0] Put# [1:4:8616:0:0:94:0] Put# [1:4:8617:0:0:49:0] Put# [1:4:8618:0:0:37:0] Put# [1:4:8619:0:0:73:0] Put# [1:4:8620:0:0:90:0] Put# [1:4:8621:0:0:52:0] Put# [1:4:8622:0:0:55:0] Put# [1:4:8623:0:0:89:0] Put# [1:4:8624:0:0:74:0] Put# [1:4:8625:0:0:19:0] Put# [1:4:8626:0:0:82:0] Put# [1:4:8627:0:0:97:0] Put# [1:4:8628:0:0:47:0] Put# [1:4:8629:0:0:25:0] Put# [1:4:8630:0:0:69:0] Put# [1:4:8631:0:0:87:0] Put# [1:4:8632:0:0:52:0] Put# [1:4:8633:0:0:98:0] Put# [1:4:8634:0:0:5:0] Put# [1:4:8635:0:0:34:0] Put# [1:4:8636:0:0:34:0] Put# [1:4:8637:0:0:63:0] Put# [1:4:8638:0:0:80:0] Put# [1:4:8639:0:0:32:0] Put# [1:4:8640:0:0:100:0] Put# [1:4:8641:0:0:14:0] Put# [1:4:8642:0:0:53:0] Put# [1:4:8643:0:0:12:0] Put# [1:4:8644:0:0:75:0] Put# [1:4:8645:0:0:95:0] Put# [1:4:8646:0:0:82:0] Put# [1:4:8647:0:0:71:0] Put# [1:4:8648:0:0:16:0] Put# [1:4:8649:0:0:8:0] Put# [1:4:8650:0:0:94:0] Put# [1:4:8651:0:0:80:0] Put# [1:4:8652:0:0:47:0] Put# [1:4:8653:0:0:34:0] Put# [1:4:8654:0:0:8:0] Put# [1:4:8655:0:0:87:0] Put# [1:4:8656:0:0:9:0] Put# [1:4:8657:0:0:43:0] Put# [1:4:8658:0:0:43:0] Put# 
[1:4:8659:0:0:67:0] Put# [1:4:8660:0:0:55:0] Put# [1:4:8661:0:0:93:0] Put# [1:4:8662:0:0:50:0] Put# [1:4:8663:0:0:38:0] Put# [1:4:8664:0:0:89:0] Put# [1:4:8665:0:0:42:0] Put# [1:4:8666:0:0:38:0] Put# [1:4:8667:0:0:27:0] Put# [1:4:8668:0:0:4:0] Put# [1:4:8669:0:0:73:0] Put# [1:4:8670:0:0:69:0] Put# [1:4:8671:0:0:33:0] Put# [1:4:8672:0:0:38:0] Put# [1:4:8673:0:0:97:0] Put# [1:4:8674:0:0:83:0] Put# [1:4:8675:0:0:93:0] Put# [1:4:8676:0:0:62:0] Put# [1:4:8677:0:0:58:0] Put# [1:4:8678:0:0:13:0] Put# [1:4:8679:0:0:54:0] Put# [1:4:8680:0:0:4:0] Put# [1:4:8681:0:0:70:0] Put# [1:4:8682:0:0:68:0] Put# [1:4:8683:0:0:72:0] Put# [1:4:8684:0:0:30:0] Put# [1:4:8685:0:0:23:0] Put# [1:4:8686:0:0:15:0] Put# [1:4:8687:0:0:65:0] Put# [1:4:8688:0:0:61:0] Put# [1:4:8689:0:0:50:0] Put# [1:4:8690:0:0:97:0] Put# [1:4:8691:0:0:33:0] Put# [1:4:8692:0:0:31:0] Put# [1:4:8693:0:0:8:0] Put# [1:4:8694:0:0:68:0] Put# [1:4:8695:0:0:98:0] Put# [1:4:8696:0:0:47:0] Put# [1:4:8697:0:0:67:0] Put# [1:4:8698:0:0:90:0] Put# [1:4:8699:0:0:75:0] Put# [1:4:8700:0:0:26:0] Put# [1:4:8701:0:0:20:0] Put# [1:4:8702:0:0:10:0] Put# [1:4:8703:0:0:85:0] Put# [1:4:8704:0:0:73:0] Put# [1:4:8705:0:0:20:0] Put# [1:4:8706:0:0:4:0] Put# [1:4:8707:0:0:55:0] Put# [1:4:8708:0:0:44:0] Put# [1:4:8709:0:0:15:0] Put# [1:4:8710:0:0:38:0] Put# [1:4:8711:0:0:16:0] Put# [1:4:8712:0:0:92:0] Put# [1:4:8713:0:0:44:0] Put# [1:4:8714:0:0:57:0] Put# [1:4:8715:0:0:47:0] Put# [1:4:8716:0:0:80:0] Put# [1:4:8717:0:0:4:0] Put# [1:4:8718:0:0:63:0] Put# [1:4:8719:0:0:16:0] Put# [1:4:8720:0:0:92:0] Put# [1:4:8721:0:0:92:0] Put# [1:4:8722:0:0:21:0] Put# [1:4:8723:0:0:83:0] Put# [1:4:8724:0:0:18:0] Put# [1:4:8725:0:0:49:0] Put# [1:4:8726:0:0:51:0] Put# [1:4:8727:0:0:75:0] Put# [1:4:8728:0:0:24:0] Put# [1:4:8729:0:0:22:0] Put# [1:4:8730:0:0:48:0] Put# [1:4:8731:0:0:75:0] Put# [1:4:8732:0:0:72:0] Put# [1:4:8733:0:0:22:0] Put# [1:4:8734:0:0:68:0] Put# [1:4:8735:0:0:75:0] Put# [1:4:8736:0:0:37:0] Put# [1:4:8737:0:0:69:0] Put# [1:4:8738:0:0:59:0] Put# [1:4:8739:0:0:87:0] Put# [1:4:8740:0:0:67:0] Put# [1:4:8741:0:0:93:0] Put# [1:4:8742:0:0:68:0] Put# [1:4:8743:0:0:2:0] Put# [1:4:8744:0:0:95:0] Put# [1:4:8745:0:0:24:0] Put# [1:4:8746:0:0:8:0] Put# [1:4:8747:0:0:42:0] Put# [1:4:8748:0:0:38:0] Put# [1:4:8749:0:0:41:0] Put# [1:4:8750:0:0:80:0] Put# [1:4:8751:0:0:18:0] Put# [1:4:8752:0:0:41:0] Put# [1:4:8753:0:0:96:0] Put# [1:4:8754:0:0:40:0] Put# [1:4:8755:0:0:98:0] Put# [1:4:8756:0:0:2:0] Put# [1:4:8757:0:0:83:0] Put# [1:4:8758:0:0:66:0] Put# [1:4:8759:0:0:66:0] Put# [1:4:8760:0:0:34:0] Put# [1:4:8761:0:0:48:0] Put# [1:4:8762:0:0:8:0] Put# [1:4:8763:0:0:17:0] Put# [1:4:8764:0:0:46:0] Put# [1:4:8765:0:0:28:0] Put# [1:4:8766:0:0:100:0] Put# [1:4:8767:0:0:57:0] Put# [1:4:8768:0:0:5:0] Put# [1:4:8769:0:0:18:0] Put# [1:4:8770:0:0:50:0] Put# [1:4:8771:0:0:96:0] Put# [1:4:8772:0:0:62:0] Put# [1:4:8773:0:0:89:0] Put# [1:4:8774:0:0:50:0] Put# [1:4:8775:0:0:76:0] Put# [1:4:8776:0:0:76:0] Put# [1:4:8777:0:0:51:0] Put# [1:4:8778:0:0:34:0] Put# [1:4:8779:0:0:12:0] Put# [1:4:8780:0:0:68:0] Put# [1:4:8781:0:0:59:0] Put# [1:4:8782:0:0:99:0] Put# [1:4:8783:0:0:6:0] Put# [1:4:8784:0:0:16:0] Put# [1:4:8785:0:0:67:0] Put# [1:4:8786:0:0:90:0] Put# [1:4:8787:0:0:6:0] Put# [1:4:8788:0:0:25:0] Put# [1:4:8789:0:0:71:0] Put# [1:4:8790:0:0:62:0] Put# [1:4:8791:0:0:5:0] Put# [1:4:8792:0:0:84:0] Put# [1:4:8793:0:0:49:0] Put# [1:4:8794:0:0:83:0] Put# [1:4:8795:0:0:86:0] Put# [1:4:8796:0:0:70:0] Put# [1:4:8797:0:0:94:0] Put# [1:4:8798:0:0:14:0] Put# [1:4:8799:0:0:98:0] Put# [1:4:8800:0:0:63:0] Put# 
[1:4:8801:0:0:55:0] Put# [1:4:8802:0:0:8:0] Put# [1:4:8803:0:0:69:0] Put# [1:4:8804:0:0:88:0] Put# [1:4:8805:0:0:62:0] Put# [1:4:8806:0:0:28:0] Put# [1:4:8807:0:0:63:0] Put# [1:4:8808:0:0:65:0] Put# [1:4:8809:0:0:12:0] Put# [1:4:8810:0:0:76:0] Put# [1:4:8811:0:0:43:0] Put# [1:4:8812:0:0:16:0] Put# [1:4:8813:0:0:54:0] Put# [1:4:8814:0:0:90:0] Put# [1:4:8815:0:0:70:0] Put# [1:4:8816:0:0:64:0] Put# [1:4:8817:0:0:96:0] Put# [1:4:8818:0:0:27:0] Put# [1:4:8819:0:0:89:0] Put# [1:4:8820:0:0:60:0] Put# [1:4:8821:0:0:58:0] Put# [1:4:8822:0:0:86:0] Put# [1:4:8823:0:0:67:0] Put# [1:4:8824:0:0:10:0] Put# [1:4:8825:0:0:23:0] Put# [1:4:8826:0:0:7:0] Put# [1:4:8827:0:0:19:0] Put# [1:4:8828:0:0:88:0] Put# [1:4:8829:0:0:68:0] Put# [1:4:8830:0:0:88:0] Put# [1:4:8831:0:0:72:0] Put# [1:4:8832:0:0:42:0] Put# [1:4:8833:0:0:76:0] Put# [1:4:8834:0:0:69:0] Put# [1:4:8835:0:0:13:0] Put# [1:4:8836:0:0:15:0] Put# [1:4:8837:0:0:63:0] Put# [1:4:8838:0:0:16:0] Put# [1:4:8839:0:0:57:0] Put# [1:4:8840:0:0:79:0] Put# [1:4:8841:0:0:27:0] Put# [1:4:8842:0:0:56:0] Put# [1:4:8843:0:0:18:0] Put# [1:4:8844:0:0:21:0] Put# [1:4:8845:0:0:1:0] Put# [1:4:8846:0:0:16:0] Put# [1:4:8847:0:0:97:0] Put# [1:4:8848:0:0:86:0] Put# [1:4:8849:0:0:86:0] Put# [1:4:8850:0:0:65:0] Put# [1:4:8851:0:0:87:0] Put# [1:4:8852:0:0:42:0] Put# [1:4:8853:0:0:19:0] Put# [1:4:8854:0:0:9:0] Put# [1:4:8855:0:0:93:0] Put# [1:4:8856:0:0:44:0] Put# [1:4:8857:0:0:40:0] Put# [1:4:8858:0:0:86:0] Put# [1:4:8859:0:0:91:0] Put# [1:4:8860:0:0:58:0] Put# [1:4:8861:0:0:3:0] Put# [1:4:8862:0:0:17:0] Put# [1:4:8863:0:0:90:0] Put# [1:4:8864:0:0:96:0] Put# [1:4:8865:0:0:33:0] Put# [1:4:8866:0:0:98:0] Put# [1:4:8867:0:0:32:0] Put# [1:4:8868:0:0:71:0] Put# [1:4:8869:0:0:85:0] Put# [1:4:8870:0:0:40:0] Put# [1:4:8871:0:0:17:0] Put# [1:4:8872:0:0:14:0] Put# [1:4:8873:0:0:58:0] Put# [1:4:8874:0:0:97:0] Put# [1:4:8875:0:0:9:0] Put# [1:4:8876:0:0:16:0] Put# [1:4:8877:0:0:77:0] Put# [1:4:8878:0:0:35:0] Put# [1:4:8879:0:0:63:0] Put# [1:4:8880:0:0:56:0] Put# [1:4:8881:0:0:96:0] Put# [1:4:8882:0:0:30:0] Put# [1:4:8883:0:0:67:0] Put# [1:4:8884:0:0:71:0] Put# [1:4:8885:0:0:80:0] Put# [1:4:8886:0:0:61:0] Put# [1:4:8887:0:0:23:0] Put# [1:4:8888:0:0:3:0] Put# [1:4:8889:0:0:27:0] Put# [1:4:8890:0:0:100:0] Put# [1:4:8891:0:0:40:0] Put# [1:4:8892:0:0:43:0] Put# [1:4:8893:0:0:22:0] Put# [1:4:8894:0:0:92:0] Put# [1:4:8895:0:0:50:0] Put# [1:4:8896:0:0:87:0] Put# [1:4:8897:0:0:13:0] Put# [1:4:8898:0:0:97:0] Put# [1:4:8899:0:0:38:0] Put# [1:4:8900:0:0:34:0] Put# [1:4:8901:0:0:28:0] Put# [1:4:8902:0:0:98:0] Put# [1:4:8903:0:0:11:0] Put# [1:4:8904:0:0:54:0] Put# [1:4:8905:0:0:84:0] Put# [1:4:8906:0:0:39:0] Put# [1:4:8907:0:0:28:0] Put# [1:4:8908:0:0:63:0] Put# [1:4:8909:0:0:21:0] Put# [1:4:8910:0:0:24:0] Put# [1:4:8911:0:0:5:0] Put# [1:4:8912:0:0:16:0] Put# [1:4:8913:0:0:53:0] Put# [1:4:8914:0:0:30:0] Put# [1:4:8915:0:0:53:0] Put# [1:4:8916:0:0:34:0] Put# [1:4:8917:0:0:64:0] Put# [1:4:8918:0:0:40:0] Put# [1:4:8919:0:0:88:0] Put# [1:4:8920:0:0:3:0] Put# [1:4:8921:0:0:100:0] Put# [1:4:8922:0:0:24:0] Put# [1:4:8923:0:0:28:0] Put# [1:4:8924:0:0:84:0] Put# [1:4:8925:0:0:99:0] Put# [1:4:8926:0:0:90:0] Put# [1:4:8927:0:0:66:0] Put# [1:4:8928:0:0:38:0] Put# [1:4:8929:0:0:52:0] Put# [1:4:8930:0:0:100:0] Put# [1:4:8931:0:0:71:0] Put# [1:4:8932:0:0:95:0] Put# [1:4:8933:0:0:80:0] Put# [1:4:8934:0:0:63:0] Put# [1:4:8935:0:0:3:0] Put# [1:4:8936:0:0:69:0] Put# [1:4:8937:0:0:7:0] Put# [1:4:8938:0:0:80:0] Put# [1:4:8939:0:0:8:0] Put# [1:4:8940:0:0:61:0] Put# [1:4:8941:0:0:66:0] Put# [1:4:8942:0:0:54:0] Put# 
[1:4:8943:0:0:11:0] Put# [1:4:8944:0:0:71:0] Put# [1:4:8945:0:0:94:0] Put# [1:4:8946:0:0:72:0] Put# [1:4:8947:0:0:47:0] Put# [1:4:8948:0:0:76:0] Put# [1:4:8949:0:0:85:0] Put# [1:4:8950:0:0:36:0] Put# [1:4:8951:0:0:18:0] Put# [1:4:8952:0:0:92:0] Put# [1:4:8953:0:0:17:0] Put# [1:4:8954:0:0:97:0] Put# [1:4:8955:0:0:49:0] Put# [1:4:8956:0:0:58:0] Put# [1:4:8957:0:0:96:0] Put# [1:4:8958:0:0:85:0] Put# [1:4:8959:0:0:58:0] Put# [1:4:8960:0:0:73:0] Put# [1:4:8961:0:0:91:0] Put# [1:4:8962:0:0:100:0] Put# [1:4:8963:0:0:24:0] Put# [1:4:8964:0:0:86:0] Put# [1:4:8965:0:0:68:0] Put# [1:4:8966:0:0:46:0] Put# [1:4:8967:0:0:40:0] Put# [1:4:8968:0:0:57:0] Put# [1:4:8969:0:0:91:0] Put# [1:4:8970:0:0:41:0] Put# [1:4:8971:0:0:20:0] Put# [1:4:8972:0:0:33:0] Put# [1:4:8973:0:0:50:0] Put# [1:4:8974:0:0:82:0] Put# [1:4:8975:0:0:26:0] Put# [1:4:8976:0:0:96:0] Put# [1:4:8977:0:0:66:0] Put# [1:4:8978:0:0:19:0] Put# [1:4:8979:0:0:74:0] Put# [1:4:8980:0:0:17:0] Put# [1:4:8981:0:0:99:0] Put# [1:4:8982:0:0:96:0] Put# [1:4:8983:0:0:32:0] Put# [1:4:8984:0:0:50:0] Put# [1:4:8985:0:0:98:0] Put# [1:4:8986:0:0:35:0] Put# [1:4:8987:0:0:89:0] Put# [1:4:8988:0:0:90:0] Put# [1:4:8989:0:0:84:0] Put# [1:4:8990:0:0:9:0] Put# [1:4:8991:0:0:86:0] Put# [1:4:8992:0:0:65:0] Put# [1:4:8993:0:0:5:0] Put# [1:4:8994:0:0:57:0] Put# [1:4:8995:0:0:36:0] Put# [1:4:8996:0:0:83:0] Put# [1:4:8997:0:0:54:0] Put# [1:4:8998:0:0:75:0] Put# [1:4:8999:0:0:12:0] Put# [1:4:9000:0:0:94:0] Put# [1:4:9001:0:0:19:0] Put# [1:4:9002:0:0:35:0] Put# [1:4:9003:0:0:10:0] Put# [1:4:9004:0:0:27:0] Put# [1:4:9005:0:0:97:0] Put# [1:4:9006:0:0:64:0] Put# [1:4:9007:0:0:73:0] Put# [1:4:9008:0:0:70:0] Put# [1:4:9009:0:0:66:0] Put# [1:4:9010:0:0:97:0] Put# [1:4:9011:0:0:84:0] Put# [1:4:9012:0:0:92:0] Put# [1:4:9013:0:0:9:0] Put# [1:4:9014:0:0:48:0] Put# [1:4:9015:0:0:80:0] Put# [1:4:9016:0:0:31:0] Put# [1:4:9017:0:0:70:0] Put# [1:4:9018:0:0:66:0] Put# [1:4:9019:0:0:20:0] Put# [1:4:9020:0:0:2:0] Put# [1:4:9021:0:0:73:0] Put# [1:4:9022:0:0:96:0] Put# [1:4:9023:0:0:41:0] Put# [1:4:9024:0:0:78:0] Put# [1:4:9025:0:0:38:0] Put# [1:4:9026:0:0:50:0] Put# [1:4:9027:0:0:20:0] Put# [1:4:9028:0:0:90:0] Put# [1:4:9029:0:0:67:0] Put# [1:4:9030:0:0:96:0] Put# [1:4:9031:0:0:38:0] Put# [1:4:9032:0:0:50:0] Put# [1:4:9033:0:0:31:0] Put# [1:4:9034:0:0:58:0] Put# [1:4:9035:0:0:52:0] Put# [1:4:9036:0:0:44:0] Put# [1:4:9037:0:0:10:0] Put# [1:4:9038:0:0:7:0] Put# [1:4:9039:0:0:74:0] Put# [1:4:9040:0:0:28:0] Put# [1:4:9041:0:0:69:0] Put# [1:4:9042:0:0:80:0] Put# [1:4:9043:0:0:21:0] Put# [1:4:9044:0:0:32:0] Put# [1:4:9045:0:0:82:0] Put# [1:4:9046:0:0:48:0] Put# [1:4:9047:0:0:13:0] Put# [1:4:9048:0:0:81:0] Put# [1:4:9049:0:0:93:0] Put# [1:4:9050:0:0:36:0] Put# [1:4:9051:0:0:88:0] Put# [1:4:9052:0:0:57:0] Put# [1:4:9053:0:0:27:0] Put# [1:4:9054:0:0:74:0] Put# [1:4:9055:0:0:35:0] Put# [1:4:9056:0:0:27:0] Put# [1:4:9057:0:0:39:0] Put# [1:4:9058:0:0:99:0] Put# [1:4:9059:0:0:72:0] Put# [1:4:9060:0:0:35:0] |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpBatchUpdate::ManyPartitions_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithFullTextIndex [GOOD] Test command err: 2026-02-19T08:26:03.900758Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489740364403896:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:03.900815Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:04.057065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:04.072088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:04.072185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:04.099147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:04.136972Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489740364403865:2081] 1771489563899689 != 1771489563899692 2026-02-19T08:26:04.143762Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:04.185075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:04.185096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:04.185103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:04.185202Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:04.286925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:06.132055Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006af/r3tmp/spilling-tmp-runner/node_1_827a3fd5-894198b1-48a74aa7-988d0066, actor: [1:7608489753249306597:2309] 2026-02-19T08:26:06.132271Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006af/r3tmp/spilling-tmp-runner |93.9%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpBatchPEA::ExecuteState_AbortBeforeAnyResponse >> KqpBatchDelete::TableWithAsyncIndex+EnableIndexStreamWrite >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table [GOOD] >> KqpLimits::TooBigKey-useSink [GOOD] >> KqpLimits::TooBigColumn-useSink >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query >> KqpBatchDelete::TableWithAsyncIndex+EnableIndexStreamWrite [GOOD] >> KqpBatchDelete::TableWithAsyncIndex-EnableIndexStreamWrite >> KqpBatchPEA::ExecuteState_AbortBeforeAnyResponse [GOOD] >> KqpBatchPEA::ExecuteState_AbortBeforeDelayedResponses >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table [GOOD] >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query >> TxUsage::WriteToTopic_Demo_38_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 4014208984619926373 2026-02-19T08:21:48.181637Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-02-19T08:21:48.205546Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-02-19T08:21:48.205633Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-02-19T08:21:48.208305Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 received TEvBlockResult {Status# OK} 2026-02-19T08:21:48.223958Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:21:48.226649Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 received TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-02-19T08:24:51.808472Z 8 00h01m07.897270s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 20058 2026-02-19T08:26:08.249610Z 7 00h01m09.761794s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 35397 2026-02-19T08:26:15.073232Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2026-02-19T08:26:15.073305Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:26:15.227320Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 received TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} >> KqpBatchDelete::TableWithAsyncIndex-EnableIndexStreamWrite [GOOD] >> TxUsage::WriteToTopic_Demo_38_Query >> BasicUsage::KeyedWriteSession_CloseTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithAsyncIndex-EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:26:10.095126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489772095642037:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:10.095200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:10.296530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:10.296614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:10.324257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:10.334373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:10.371953Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:10.374059Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489772095642006:2081] 1771489570093506 != 1771489570093509 2026-02-19T08:26:10.418321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:10.418343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:10.418349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:10.418418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:10.595604Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:12.308428Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006aa/r3tmp/spilling-tmp-runner/node_1_2dc59d23-a5e0c47f-c40d7f37-d4063b68, actor: [1:7608489780685577443:2309] 2026-02-19T08:26:12.308601Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006aa/r3tmp/spilling-tmp-runner 2026-02-19T08:26:12.795340Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.795394Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:12.795576Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. 
Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:26:12.795627Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:12.795766Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 1 2026-02-19T08:26:12.795872Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.795979Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"v1" (Just (String '"123")))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct '('"k" (Int32 '1)) $1 $2 $3)) (let $5 (AsStruct '('"k" (Int32 '"2")) $1 $2 $3)) (let $6 (AsStruct '('"k" (Int32 '"3")) $1 $2 $3)) (let $7 (AsStruct '('"k" (Int32 '"4")) $1 $2 $3)) (let $8 (AsStruct '('"k" (Int32 '"5")) $1 $2 $3)) (return (Iterator (AsList $4 $5 $6 $7 $8))) )))) ) 2026-02-19T08:26:12.795997Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:26:12.796156Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:26:12.796476Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489780685577775:2326] 2026-02-19T08:26:12.796501Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489780685577775:2326], channels: 0 2026-02-19T08:26:12.796541Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. 
Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:12.796558Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:12.796573Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489780685577775:2326] 2026-02-19T08:26:12.796596Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489780685577775:2326], channels: 0 2026-02-19T08:26:12.796637Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489780685577775:2326], trace_id# 2026-02-19T08:26:12.796672Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489780685577775:2326], trace_id# 2026-02-19T08:26:12.796697Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:12.804756Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489780685577775:2326] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:12.804834Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. 
Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489780685577775:2326], trace_id# 2026-02-19T08:26:12.804869Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489780685577775:2326], trace_id# 2026-02-19T08:26:12.805549Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489780685577772:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489780685577775:2326] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1575 Tasks { TaskId: 1 CpuTimeUs: 927 FinishTimeMs: 1771489572805 EgressBytes: 105 EgressRows: 5 ComputeCpuTimeUs: 26 BuildCpuTimeUs: 901 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489572796 UpdateTimeMs: 1771489572805 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:12.805587Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710661. Ctx: { TraceId: 01khtg3dsy946xz7mstpges81q, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2NDg5MjItNTdlMGY0YzgtYTBmNTg1NTYtZGMyMDZmOTE=, PoolId: default, IsStreamingQuery: 0}. Compute acto ... 02649299207:2389] 2026-02-19T08:26:17.474711Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489802649299208:2390], channels: 2 2026-02-19T08:26:17.474726Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489802649299207:2389], channels: 1 2026-02-19T08:26:17.474824Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# true trace_id# 2026-02-19T08:26:17.474849Z node 2 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. 
Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:17.474865Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [2:7608489802649299208:2390] 2026-02-19T08:26:17.474885Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [2:7608489802649299207:2389] 2026-02-19T08:26:17.474911Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489802649299208:2390], channels: 2 2026-02-19T08:26:17.474925Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7608489802649299207:2389], channels: 1 2026-02-19T08:26:17.474972Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489802649299208:2390], CA [2:7608489802649299207:2389], trace_id# 2026-02-19T08:26:17.475003Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489802649299208:2390], CA [2:7608489802649299207:2389], trace_id# 2026-02-19T08:26:17.475036Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:17.475102Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489802649299207:2389] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:17.475133Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489802649299208:2390], CA [2:7608489802649299207:2389], trace_id# 2026-02-19T08:26:17.475160Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489802649299208:2390], CA [2:7608489802649299207:2389], trace_id# 2026-02-19T08:26:17.475623Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489802649299208:2390] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:17.475654Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489802649299208:2390], CA [2:7608489802649299207:2389], trace_id# 2026-02-19T08:26:17.475683Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:7608489802649299208:2390], CA [2:7608489802649299207:2389], trace_id# 2026-02-19T08:26:17.476506Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489802649299207:2389] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1127 Tasks { TaskId: 1 CpuTimeUs: 509 FinishTimeMs: 1771489577476 OutputBytes: 1 Tables { TablePath: "/Root/global_async_idx/idx/indexImplTable" AffectedPartitions: 1 } ComputeCpuTimeUs: 50 BuildCpuTimeUs: 459 WaitOutputTimeUs: 28 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489577476 CreateTimeMs: 1771489577474 UpdateTimeMs: 1771489577476 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:17.476564Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. 
Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489802649299207:2389] 2026-02-19T08:26:17.476588Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7608489802649299208:2390], trace_id# 2026-02-19T08:26:17.476607Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7608489802649299208:2390], trace_id# 2026-02-19T08:26:17.476714Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7608489802649299208:2390] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 675 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 221 FinishTimeMs: 1771489577476 InputBytes: 1 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 88 BuildCpuTimeUs: 133 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489577476 CreateTimeMs: 1771489577475 UpdateTimeMs: 1771489577476 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:17.476738Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489802649299208:2390] 2026-02-19T08:26:17.476841Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:17.476873Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:17.476903Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489802649299201:2325] TxId: 281474976710665. Ctx: { TraceId: 01khtg3j7w53q8j8wve7wm0qag, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTg2ZGRlNWUtYTk3YzY4MzYtZWU5NGQzNzYtYzJjNjZjYWQ=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.001802s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# >> KqpBatchPEA::ExecuteState_AbortBeforeDelayedResponses [GOOD] >> KqpBatchPEA::ExecuteState_ChildExecuterRetryLimitExceeded >> TxUsage::WriteToTopic_Demo_47_Table [GOOD] >> TraverseColumnShard::TraverseColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchPEA::ExecuteState_AbortBeforeDelayedResponses [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:26:11.585634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:11.651493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:11.663592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:26:11.663639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:26:11.663936Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:26:11.848337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:11.848422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:11.898078Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489569704589 != 1771489569704593 2026-02-19T08:26:11.904965Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:11.947287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:12.057213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:12.352198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:12.352257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:12.352304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:12.352630Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:12.363820Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:13.427510Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:13.427622Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:13.427839Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:26:13.427917Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:13.428122Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715661. Resolved key sets: 1 2026-02-19T08:26:13.428300Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:13.428481Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value0"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '0)) $1) (AsStruct '('"Key" (Uint32 '1)) $1) (AsStruct '('"Key" (Uint32 '2)) $1) (AsStruct '('"Key" (Uint32 '3)) $1) (AsStruct '('"Key" (Uint32 '4)) $1)))) )))) ) 2026-02-19T08:26:13.428531Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:26:13.428751Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:26:13.429176Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1225:2780] 2026-02-19T08:26:13.429238Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1225:2780], channels: 0 2026-02-19T08:26:13.429331Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:13.429374Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:13.429415Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1225:2780] 2026-02-19T08:26:13.429460Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. 
Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1225:2780], channels: 0 2026-02-19T08:26:13.429528Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1225:2780], trace_id# 2026-02-19T08:26:13.429580Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1225:2780], trace_id# 2026-02-19T08:26:13.429624Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:13.467113Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1225:2780] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:13.467240Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1225:2780], trace_id# 2026-02-19T08:26:13.467294Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1225:2780], trace_id# 2026-02-19T08:26:13.468142Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1225:2780] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1724 Tasks { TaskId: 1 CpuTimeUs: 1014 FinishTimeMs: 1771489573467 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 36 BuildCpuTimeUs: 978 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489573429 UpdateTimeMs: 1771489573467 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:13.468196Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjVlYzUzZDctMzRlM2M1YmUtNDI4Y2Q4MGYtM2VhMmE0NDU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1225:2780] 2026-02-19T08:26:13.468258Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3ee57x45jbg3yxjqp44c, Database: /Root, SessionId: ydb ... ion# ComputeTime: 0.000636s ReadRows: 6 ReadBytes: 48 RequestUnits# 6 ForceFlag# true trace_id# 2026-02-19T08:26:18.786321Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1319:3052] TxId: 281474976715667. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1343:3071] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 253 DurationUs: 2000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 66 FinishTimeMs: 1771489578784 InputRows: 6 InputBytes: 84 EgressBytes: 90 EgressRows: 6 ComputeCpuTimeUs: 36 BuildCpuTimeUs: 30 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489578782 CreateTimeMs: 1771489578776 UpdateTimeMs: 1771489578784 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:18.786349Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1343:3071] 2026-02-19T08:26:18.786396Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [2:1319:3052] TxId: 281474976715667. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:1318:3052] trace_id# 2026-02-19T08:26:18.786455Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1319:3052] TxId: 281474976715667. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000584s ReadRows: 6 ReadBytes: 48 RequestUnits# 6 ForceFlag# true trace_id# 2026-02-19T08:26:18.786609Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1321:3052] TxId: 281474976715668. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1347:3073] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 250 DurationUs: 2000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 60 FinishTimeMs: 1771489578785 InputRows: 3 InputBytes: 45 EgressBytes: 45 EgressRows: 3 ComputeCpuTimeUs: 33 BuildCpuTimeUs: 27 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489578783 CreateTimeMs: 1771489578778 UpdateTimeMs: 1771489578785 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:18.786635Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715668. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1347:3073] 2026-02-19T08:26:18.786678Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [2:1321:3052] TxId: 281474976715668. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:1320:3052] trace_id# 2026-02-19T08:26:18.786703Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1321:3052] TxId: 281474976715668. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000527s ReadRows: 3 ReadBytes: 24 RequestUnits# 3 ForceFlag# true trace_id# 2026-02-19T08:26:18.789904Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1315:3052] TxId: 281474976715665. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:18.789966Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1315:3052] TxId: 281474976715665. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:18.790156Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1317:3052] TxId: 281474976715666. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:18.790192Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1317:3052] TxId: 281474976715666. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:18.790581Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1321:3052] TxId: 281474976715668. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-02-19T08:26:18.790629Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1321:3052] TxId: 281474976715668. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:18.790883Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1319:3052] TxId: 281474976715667. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:18.790924Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1319:3052] TxId: 281474976715667. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:18.791294Z node 2 :KQP_EXECUTER ERROR: {KQPPEA@kqp_partitioned_executer.cpp:174} ActorId: [2:1312:3052], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got abort execution. Sender# [0:7307199536658146131:7762515] FromSessionActor# false StatusCode# ABORTED Issues# {
: Error: Test abort before delayed responses } 2026-02-19T08:26:18.791391Z node 2 :KQP_EXECUTER ERROR: {KQPPEA@kqp_partitioned_executer.cpp:853} ActorId: [2:1312:3052], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: First error occurred. Status# ABORTED Issues# {
: Error: Test abort before delayed responses } 2026-02-19T08:26:18.791450Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:630} ActorId: [2:1312:3052], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Entering AbortState, trying to finish execution. ActivePartitionsCount# 4 ReturnStatus# ABORTED 2026-02-19T08:26:18.791619Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1312:3052], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1315:3052] PartitionIndex# 0 Status# SUCCESS Issues# 2026-02-19T08:26:18.791679Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1312:3052], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 3 TotalPartitions# 4 2026-02-19T08:26:18.791769Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1312:3052], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Got tx response. Sender# [2:1317:3052] PartitionIndex# 1 Status# SUCCESS Issues# 2026-02-19T08:26:18.791795Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1312:3052], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 2 TotalPartitions# 4 2026-02-19T08:26:18.791835Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1312:3052], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Got tx response. Sender# [2:1321:3052] PartitionIndex# 3 Status# SUCCESS Issues# 2026-02-19T08:26:18.791860Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1312:3052], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 1 TotalPartitions# 4 2026-02-19T08:26:18.791894Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1312:3052], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Got tx response. Sender# [2:1319:3052] PartitionIndex# 2 Status# SUCCESS Issues# 2026-02-19T08:26:18.791958Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [2:1312:3052], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# ABORTED Issues# [ {
: Error: Test abort before delayed responses } {
: Error: while executing BATCH UPDATE query } ] 2026-02-19T08:26:18.792525Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:18.792800Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:1359:3052] TxId: 281474976715669. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:1358:3052] trace_id# 2026-02-19T08:26:18.792952Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1359:3052] TxId: 281474976715669. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:18.792992Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1359:3052] TxId: 281474976715669. Ctx: { TraceId: 01khtg3knd0svcryeevya5rn9t, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWE5ZTg4ZjYtZGFmNDAyYTItYmY2ZGJjMGUtMTM5YjZmNWI=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> KqpLimits::TooBigColumn-useSink [GOOD] >> TxUsage::WriteToTopic_Demo_47_Query >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> BasicUsage::KeyedWriteSession_CloseTimeout [GOOD] Test command err: 2026-02-19T08:22:40.818236Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488869563756054:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.818275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:40.895312Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:22:40.895598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:41.181424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.181542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.187885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.230590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.269879Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:41.273276Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7608488869563756019:2081] 1771489360816314 != 1771489360816317 2026-02-19T08:22:41.360948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f3e/r3tmp/yandexub5750.tmp 2026-02-19T08:22:41.360979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f3e/r3tmp/yandexub5750.tmp 2026-02-19T08:22:41.361148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f3e/r3tmp/yandexub5750.tmp 2026-02-19T08:22:41.361227Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:41.389221Z INFO: TTestServer started on Port 17039 GrpcPort 24915 2026-02-19T08:22:41.488558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:24915 2026-02-19T08:22:41.588576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.606778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:22:41.670594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:41.836095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-02-19T08:22:41.843539Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:43.811028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488882448658928:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.811279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.811293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488882448658942:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.811734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488882448658965:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.811800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.818226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:43.849504Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488882448658966:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:22:44.028422Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488882448659034:2645] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:44.056062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.092965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.183160Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488886743626351:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:44.185326Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=M2ZjNjlhNWMtZTc1YTEwNjItNTYyYjkwNWItMjdlYTgzMA==, ActorId: [1:7608488882448658924:2333], ActorState: ExecuteState, LegacyTraceId: 01khtfx1tqepca2c3jxs34dcq9, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:44.187581Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:22:44.190442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608488886743626630:2824] 2026-02-19T08:22:45.818643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488869563756054:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.818717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-02-19T08:22:49.465506Z :ReadBinaryProducerIdFromLowLevelWrite INFO: TTopicSdkTestSetup started 2026-02-19T08:22:49.517693Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:22:49.548457Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608488908218463294:2912] connected; active server actors: 1 2026-02-19T08:22:49.548705Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-02-19T08:22:49.549808Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-02-19T08:22:49.549932Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-02-19T08:22:49.558680Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:49.559741Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_000 ... rsisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 3000000 } max_queue_wait_time { nanos: 3000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-02-19T08:26:18.118257Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] OnAck: seqNo=996, txId=? 2026-02-19T08:26:18.118271Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: acknoledged message 996 2026-02-19T08:26:18.118355Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2026-02-19T08:26:18.118424Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 997 written { offset: 996 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 3000000 } max_queue_wait_time { nanos: 3000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-02-19T08:26:18.118445Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] OnAck: seqNo=997, txId=? 2026-02-19T08:26:18.118462Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: acknoledged message 997 2026-02-19T08:26:18.118579Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2026-02-19T08:26:18.118664Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 998 written { offset: 997 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 3000000 } max_queue_wait_time { nanos: 3000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-02-19T08:26:18.118687Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] OnAck: seqNo=998, txId=? 
2026-02-19T08:26:18.118704Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: acknoledged message 998 2026-02-19T08:26:18.118821Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2026-02-19T08:26:18.118891Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 999 written { offset: 998 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 3000000 } max_queue_wait_time { nanos: 3000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-02-19T08:26:18.118907Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] OnAck: seqNo=999, txId=? 2026-02-19T08:26:18.118922Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: acknoledged message 999 2026-02-19T08:26:18.119017Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2026-02-19T08:26:18.119072Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 1000 written { offset: 999 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 3000000 } max_queue_wait_time { nanos: 3000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-02-19T08:26:18.119088Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] OnAck: seqNo=1000, txId=? 2026-02-19T08:26:18.119103Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: acknoledged message 1000 2026-02-19T08:26:18.121784Z :INFO: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2026-02-19T08:26:18.121868Z :INFO: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session will now close 2026-02-19T08:26:18.121968Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: aborting 2026-02-19T08:26:18.122661Z :INFO: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:26:18.122728Z :DEBUG: [/Root] TraceId [] SessionId [7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0] PartitionId [0] Generation [1] Write session: destroy 2026-02-19T08:26:18.123373Z node 16 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: 7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0 grpc read done: success: 0 data: 2026-02-19T08:26:18.123405Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: 7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0 grpc read failed 2026-02-19T08:26:18.123783Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 2 sessionId: 7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0 2026-02-19T08:26:18.123809Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: 7b16b807-dc025010-3abcac9d-10fc1f23_0|c8f6e11a-cbadcd2e-4f2b59db-b742a532_0 is DEAD 2026-02-19T08:26:18.124320Z node 16 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:18.124476Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [16:7608489797257443012:2474] destroyed 2026-02-19T08:26:18.124519Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:26:18.124552Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:18.124574Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.124595Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:18.124618Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.124637Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:18.131243Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:26:18.131281Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.131304Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:18.131340Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.131359Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][1][StateIdle] Try persist 2026-02-19T08:26:18.131667Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:26:18.131680Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.131688Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:18.131696Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.131704Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][2][StateIdle] Try persist 2026-02-19T08:26:18.211147Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:18.211193Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.211218Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:18.211249Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.211270Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:18.231624Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:26:18.231680Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.231712Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][1][StateIdle] 
Process user action and tx pending commits 2026-02-19T08:26:18.231749Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.231779Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][1][StateIdle] Try persist 2026-02-19T08:26:18.232055Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:26:18.232078Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.232093Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:18.232110Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:18.232122Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][2][StateIdle] Try persist >> KqpBatchDelete::Large_1 >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.885341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.961006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.965524Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.965795Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.965925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.294838Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.378315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.378400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.434884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.546507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.166662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.167023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.167051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.167074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.167428Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.228701Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.758553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:46.330340Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.336106Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:46.338929Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.361132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:46.361222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.410904Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:46.412841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.570948Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.571068Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.572427Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.573107Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.573798Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.574590Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.574910Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.575009Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.575224Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.575368Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.575508Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.590773Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.795340Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.845785Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:46.845904Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:46.889245Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:46.889503Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:46.889753Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:46.889818Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:46.889887Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:46.889959Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:46.890037Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:46.890100Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:46.890632Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:46.896425Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.896537Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.919227Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:46.920191Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:46.978573Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:46.981031Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:46.989740Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:46.989790Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:46.989852Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:46.995679Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:46.999686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.007637Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.007778Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.021642Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:47.265721Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:47.329961Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:47.576946Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:47.709125Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:47.709213Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:48.404870Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... de(), pipe client id = [2:5449:4706] 2026-02-19T08:26:17.926288Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5449:4706], server id = [2:5450:4707], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:17.926372Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5450:4707] 2026-02-19T08:26:17.926530Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5450:4707], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:17.926582Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:17.926696Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:17.926757Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:5447:4704], StatRequests.size() = 1 2026-02-19T08:26:17.926812Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:26:18.029306Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5437:4694], ActorId: [2:5438:4695], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODI4OGMyY2MtODE4NjU3YzAtM2I2NTZkZDctNmE1MTg1ODM=, TxId: 2026-02-19T08:26:18.029372Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5437:4694], ActorId: [2:5438:4695], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODI4OGMyY2MtODE4NjU3YzAtM2I2NTZkZDctNmE1MTg1ODM=, TxId: 2026-02-19T08:26:18.029654Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5436:4693], ActorId: [2:5437:4694], Got response [2:5438:4695] SUCCESS 2026-02-19T08:26:18.029977Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:18.044236Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-02-19T08:26:18.044317Z node 2 :STATISTICS DEBUG: 
tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-02-19T08:26:18.133605Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:26:18.133698Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:26:18.165938Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:5449:4706], schemeshard count = 1 2026-02-19T08:26:18.983311Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:711: [72075186224037894] ScheduleNextBackgroundTraversal 2026-02-19T08:26:18.983374Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:842: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-02-19T08:26:18.983416Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:754: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:18.987608Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:19.017612Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:19.018313Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:19.018409Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:19.019620Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:19.056382Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:19.056718Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-02-19T08:26:19.057606Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5528:4748], server id = [2:5532:4752], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:19.058076Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5528:4748], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.058682Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5529:4749], server id = [2:5533:4753], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:19.058754Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5529:4749], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.059700Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5530:4750], server id = [2:5534:4754], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:19.059764Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5530:4750], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.060094Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5531:4751], server id = [2:5535:4755], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:19.060150Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5531:4751], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.066625Z node 2 
:STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:19.067291Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5528:4748], server id = [2:5532:4752], tablet id = 72075186224037899 2026-02-19T08:26:19.067344Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.068836Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:19.069152Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:19.069655Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5529:4749], server id = [2:5533:4753], tablet id = 72075186224037900 2026-02-19T08:26:19.069688Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.069895Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5530:4750], server id = [2:5534:4754], tablet id = 72075186224037901 2026-02-19T08:26:19.069928Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.070227Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:19.070275Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:19.070446Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:19.070584Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:19.071027Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5548:4764], ActorId: [2:5549:4765], Starting query actor #1 [2:5550:4766] 2026-02-19T08:26:19.071091Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5549:4765], ActorId: [2:5550:4766], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:19.073529Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5531:4751], server id = [2:5535:4755], tablet id = 72075186224037902 2026-02-19T08:26:19.073568Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.074378Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5549:4765], ActorId: [2:5550:4766], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OGIxMDZkYzItNTA3NmUyNmItN2E4NGE0YjgtYmQ3M2Y3NGU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:19.188387Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5559:4775]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:19.188723Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:19.188785Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5559:4775], StatRequests.size() = 1 2026-02-19T08:26:19.300011Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5549:4765], ActorId: [2:5550:4766], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGIxMDZkYzItNTA3NmUyNmItN2E4NGE0YjgtYmQ3M2Y3NGU=, TxId: 2026-02-19T08:26:19.300069Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5549:4765], ActorId: [2:5550:4766], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGIxMDZkYzItNTA3NmUyNmItN2E4NGE0YjgtYmQ3M2Y3NGU=, TxId: 2026-02-19T08:26:19.300350Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5548:4764], ActorId: [2:5549:4765], Got response [2:5550:4766] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:26:19.300644Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5572:4781]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:19.300818Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:19.301259Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:19.301299Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:19.301892Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:19.301934Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:19.301983Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:19.304645Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.908515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.999254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:42.005781Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:42.006048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:42.006179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.339623Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.436164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.436290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.494200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.590928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.259460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.259883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.259917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.259942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.260366Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.323371Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.844344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:46.901497Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.908173Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:46.911220Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.936946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:46.937052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.986244Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:46.988208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.155435Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:47.155525Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.156751Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.157362Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.157941Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.158661Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.159013Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.159108Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.159316Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.159458Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.159570Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.174258Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.380179Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:47.431181Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:47.431299Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:47.471766Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:47.471986Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:47.472240Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:47.472313Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:47.472363Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:47.472420Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:47.472490Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:47.472554Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:47.473058Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:47.478576Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.478700Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.505546Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:47.506795Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:47.565658Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:47.568947Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:47.580160Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.580221Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:47.580290Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:47.584707Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.588140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.599518Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.599653Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.611902Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:47.835003Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:47.875729Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:48.169884Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:48.291176Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:48.291267Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:48.881731Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... time: 1771489579095753 2026-02-19T08:26:19.252609Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-02-19T08:26:19.252642Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-02-19T08:26:19.252674Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-02-19T08:26:19.252747Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-02-19T08:26:19.252802Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:19.252888Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-02-19T08:26:19.252950Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:19.253006Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:19.253059Z node 2 :STATISTICS DEBUG: tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:19.253207Z node 2 :STATISTICS DEBUG: tx_init.cpp:300: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:19.254267Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:19.254922Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:19.254996Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:19.255105Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5602:4801] Owner: [2:5601:4800]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:19.255163Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5602:4801] Owner: [2:5601:4800]. 
Column diff is empty, finishing 2026-02-19T08:26:19.256930Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:19.257026Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:19.258585Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:19.278482Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5611:4808] 2026-02-19T08:26:19.278659Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5565:4780], server id = [2:5611:4808], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:19.278998Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5611:4808], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-02-19T08:26:19.279357Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5612:4809] 2026-02-19T08:26:19.279472Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5612:4809], schemeshard id = 72075186224037897 2026-02-19T08:26:19.306478Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:19.306649Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2026-02-19T08:26:19.307214Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5551:4767], server id = [2:5555:4771], tablet id = 72075186224037900 2026-02-19T08:26:19.307258Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.308179Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5617:4813], server id = [2:5621:4817], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:19.308282Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5617:4813], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.308698Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5618:4814], server id = [2:5622:4818], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:19.308753Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5618:4814], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.309918Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5619:4815], server id = [2:5623:4819], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:19.309977Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5619:4815], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.310734Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5620:4816], server id = [2:5624:4820], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:19.310788Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5620:4816], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.311640Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: 
Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:19.312790Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5617:4813], server id = [2:5621:4817], tablet id = 72075186224037899 2026-02-19T08:26:19.312824Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.313454Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:19.313867Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5619:4815], server id = [2:5623:4819], tablet id = 72075186224037901 2026-02-19T08:26:19.313899Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.314381Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:19.314624Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5620:4816], server id = [2:5624:4820], tablet id = 72075186224037902 2026-02-19T08:26:19.314654Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.316678Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:19.316729Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:19.317048Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:19.317261Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:19.317548Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5631:4826], ActorId: [2:5632:4827], Starting query actor #1 [2:5633:4828] 2026-02-19T08:26:19.317624Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5632:4827], ActorId: [2:5633:4828], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:19.320753Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5618:4814], server id = [2:5622:4818], tablet id = 72075186224037900 2026-02-19T08:26:19.320792Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.321737Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5632:4827], ActorId: [2:5633:4828], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWU0YjE3YzctZTUxYTc5YTEtMzhjOWU3YjUtNzkxZjIwNQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:19.429050Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5642:4837]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:19.429485Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:19.429550Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5642:4837], StatRequests.size() = 1 2026-02-19T08:26:19.593456Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5632:4827], ActorId: [2:5633:4828], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWU0YjE3YzctZTUxYTc5YTEtMzhjOWU3YjUtNzkxZjIwNQ==, TxId: 2026-02-19T08:26:19.593539Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5632:4827], ActorId: [2:5633:4828], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWU0YjE3YzctZTUxYTc5YTEtMzhjOWU3YjUtNzkxZjIwNQ==, TxId: 2026-02-19T08:26:19.594050Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5631:4826], ActorId: [2:5632:4827], Got response [2:5633:4828] SUCCESS 2026-02-19T08:26:19.594517Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:19.609633Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:19.609730Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-02-19T08:26:19.688718Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5661:4845]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:19.689007Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:19.689053Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:19.689250Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:19.689288Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:19.689346Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:19.692198Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.615220Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.710810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.716557Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.716869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.717030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.046502Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.133769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.133855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.191155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.291016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:42.905876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:42.906306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:42.906340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:42.906381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:42.906794Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:42.969475Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.471462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:45.896105Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:45.905393Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:45.908550Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:45.938392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:45.938505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:45.988906Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:45.991066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.174309Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.174449Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.176172Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.176957Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.177786Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.178740Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.179082Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.179235Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.179456Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.179573Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.179705Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.194223Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.411949Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.452162Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:46.452249Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:46.488540Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:46.488751Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:46.489002Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:46.489063Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:46.489110Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:46.489154Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:46.489201Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:46.489269Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:46.489683Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:46.494872Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.494992Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.518480Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:46.519541Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:46.582084Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:46.585133Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:46.596036Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:46.596087Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:46.596160Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:46.600760Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:46.604155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:46.618416Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:46.618589Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:46.632778Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:46.830918Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:46.902805Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:47.185425Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:47.307966Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:47.308047Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:47.898492Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 4] EvFastPropagateCheck 2026-02-19T08:26:18.237948Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:26:18.303636Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:5469:4711], schemeshard count = 1 2026-02-19T08:26:19.404321Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:711: [72075186224037894] ScheduleNextBackgroundTraversal 2026-02-19T08:26:19.404406Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:842: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-02-19T08:26:19.404463Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:754: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:19.409646Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:19.441954Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:19.442715Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:19.442809Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:19.444114Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-02-19T08:26:19.469668Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:19.469990Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-02-19T08:26:19.470912Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5550:4755], server id = [2:5554:4759], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:19.471285Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5550:4755], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.471470Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5551:4756], server id = [2:5555:4760], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:19.471536Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5551:4756], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.471845Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5552:4757], server id = [2:5556:4761], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:19.471886Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5552:4757], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.472649Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5553:4758], server id = [2:5557:4762], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:19.472695Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5553:4758], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:19.478201Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:19.478546Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5550:4755], server id = [2:5554:4759], tablet id = 72075186224037899 2026-02-19T08:26:19.478583Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.479130Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:19.479722Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5551:4756], server id = [2:5555:4760], tablet id = 72075186224037900 2026-02-19T08:26:19.479754Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.480257Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:19.480642Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5552:4757], server id = [2:5556:4761], tablet id = 72075186224037901 2026-02-19T08:26:19.480667Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.481099Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:19.481144Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:19.481296Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-02-19T08:26:19.481429Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:19.481865Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5570:4771], ActorId: [2:5571:4772], Starting query actor #1 [2:5572:4773] 2026-02-19T08:26:19.481936Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5571:4772], ActorId: [2:5572:4773], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:19.484000Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5553:4758], server id = [2:5557:4762], tablet id = 72075186224037902 2026-02-19T08:26:19.484027Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:19.484711Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5571:4772], ActorId: [2:5572:4773], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YWNhYzRmMjItN2I4MDk0ZjQtOTQ1ZGZiMGYtYjQ2MTgwZjM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:19.576485Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5581:4782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:19.576683Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:19.576734Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5581:4782], StatRequests.size() = 1 2026-02-19T08:26:19.703402Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5571:4772], ActorId: [2:5572:4773], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWNhYzRmMjItN2I4MDk0ZjQtOTQ1ZGZiMGYtYjQ2MTgwZjM=, TxId: 2026-02-19T08:26:19.703477Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5571:4772], ActorId: [2:5572:4773], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWNhYzRmMjItN2I4MDk0ZjQtOTQ1ZGZiMGYtYjQ2MTgwZjM=, TxId: 2026-02-19T08:26:19.703772Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5570:4771], ActorId: [2:5571:4772], Got response [2:5572:4773] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:26:19.704244Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:19.705326Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2853:3044];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:997;event=tablet_die; 2026-02-19T08:26:19.758629Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:19.758691Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. 
No ActorId to send reply. 2026-02-19T08:26:19.824752Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:5601:4792];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; 2026-02-19T08:26:19.834679Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } UpsertIndexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: true ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Options { SchemeNeedActualization: false } ; 2026-02-19T08:26:19.835181Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2026-02-19T08:26:19.835560Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 DefaultCompression { } UpsertIndexes { Id: 4 Name: "cms_value" StorageId: "__DEFAULT" InheritPortionStorage: true ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 2 } } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2026-02-19T08:26:20.060815Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5712:4885]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:20.061236Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:20.061307Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:20.061792Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:20.061864Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:20.061933Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:20.065917Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpBatchUpdate::TableWithVectorIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: 2026-02-19T08:19:31.626468Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488055088848965:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:31.629449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:19:32.110741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:19:32.132301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:19:32.133598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:19:32.227271Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:19:32.234115Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488055088848720:2081] 1771489171549174 != 1771489171549177 2026-02-19T08:19:32.253876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:19:32.415394Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:19:32.430482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:19:32.430504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:19:32.430510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:19:32.430590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:19:32.597947Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:19:33.075996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:19:33.091522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:19:33.173396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:33.340088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:33.530863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:33.620536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:35.664038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488072268719783:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:35.664165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:35.665553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488072268719793:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:35.665617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.003063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.053316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.087217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.123164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.165696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.233002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.266410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.323663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:19:36.417223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488076563687963:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.417309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.417606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488076563687968:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.417639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488076563687969:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.417791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:19:36.421933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:19:36.439715Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488076563687972:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:19:36.518808Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488076563688023:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:19:36.622566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488055088848965:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:19:36.622653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:19:38.257724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... T08:26:12.128044Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:12.160168Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:12.160193Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:12.160198Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:12.160260Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:12.302803Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:12.590919Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:12.598664Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:12.647340Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:12.786384Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:12.863402Z 
node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:13.030202Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:15.884772Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489791649576430:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:15.884885Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:15.885140Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489791649576439:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:15.885203Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:15.970380Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:16.002511Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:16.035755Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:16.066724Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:16.099747Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:16.131827Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:16.165974Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:16.209647Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:16.281085Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489795944544603:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:16.281148Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489795944544608:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:16.281162Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:16.281336Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489795944544610:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:16.281375Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:16.285184Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:16.296191Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608489795944544611:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:26:16.373184Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608489795944544663:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:17.008399Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608489778764672694:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:17.008470Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:26:19.549793Z node 5 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:186: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2026-02-19T08:26:19.549917Z node 5 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710673 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2026-02-19T08:26:19.550070Z node 5 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:890} ActorId: [5:7608489808829446862:2528] TxId: 281474976710673. Ctx: { TraceId: 01khtg3kyqf53n0senfzhadg2t, Database: /Root, SessionId: ydb://session/3?node_id=5&id=N2I5MjZkMTEtNmM5Nzk4MWItYzliNTU5ZDQtZGY1ZWExMzc=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; trace_id# 2026-02-19T08:26:19.550532Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=5&id=N2I5MjZkMTEtNmM5Nzk4MWItYzliNTU5ZDQtZGY1ZWExMzc=, ActorId: [5:7608489804534479544:2528], ActorState: ExecuteState, LegacyTraceId: 01khtg3kyqf53n0senfzhadg2t, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold" severity: 1 } } trace_id#
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] >> KqpBatchUpdate::SimplePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:42.168121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:42.276214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:42.284473Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:42.284832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:42.285007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.675863Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.758393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.758540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.816296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.905875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.547714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.547781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.547815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.550494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.550954Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.616982Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:44.129934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:47.068479Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:47.075890Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:47.079734Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:47.111980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:47.112118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.150337Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:47.151817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.322235Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:47.322340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.323838Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.324624Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.325380Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.325928Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.326160Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.326305Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.326548Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.326686Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.326785Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.342118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.548587Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:47.572070Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:47.572147Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:47.610532Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:47.611900Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:47.612113Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:47.612183Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:47.612243Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:47.612291Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:47.612340Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:47.612396Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:47.612839Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:47.627666Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.627805Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2143:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.656075Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2648] 2026-02-19T08:25:47.656422Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2648], schemeshard id = 72075186224037897 2026-02-19T08:25:47.714390Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2265:2670] 2026-02-19T08:25:47.716724Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:47.729102Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.729165Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. Creating table 2026-02-19T08:25:47.729244Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:47.735458Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.739472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.746818Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.746967Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.759976Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:47.981906Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:48.010091Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:48.297396Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:48.412351Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:48.412445Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2675] Owner: [2:2271:2674]. Column diff is empty, finishing 2026-02-19T08:25:49.187741Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... de(), pipe client id = [2:5462:4707] 2026-02-19T08:26:19.372472Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5463:4708] 2026-02-19T08:26:19.372650Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5462:4707], server id = [2:5463:4708], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:19.372715Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5463:4708], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:19.372788Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:19.372947Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:19.373029Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:5460:4705], StatRequests.size() = 1 2026-02-19T08:26:19.373103Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:26:19.512899Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5450:4695], ActorId: [2:5451:4696], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDdjZTk1MDQtNDNkYWVjOWYtZTJkMjc1ZjktZjM3NDk5NGI=, TxId: 2026-02-19T08:26:19.512987Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5450:4695], ActorId: [2:5451:4696], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDdjZTk1MDQtNDNkYWVjOWYtZTJkMjc1ZjktZjM3NDk5NGI=, TxId: 2026-02-19T08:26:19.513396Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5449:4694], ActorId: [2:5450:4695], Got response [2:5451:4696] SUCCESS 2026-02-19T08:26:19.513677Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:19.539152Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-02-19T08:26:19.539213Z node 2 :STATISTICS DEBUG: 
tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-02-19T08:26:19.642260Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:26:19.642343Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:26:19.696129Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:5462:4707], schemeshard count = 1 2026-02-19T08:26:20.714769Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:711: [72075186224037894] ScheduleNextBackgroundTraversal 2026-02-19T08:26:20.714833Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:842: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-02-19T08:26:20.714867Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:754: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:20.719164Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:20.748198Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:20.748747Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:20.748836Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:20.750004Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:20.774786Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:20.775068Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-02-19T08:26:20.776226Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5543:4751], server id = [2:5547:4755], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:20.776706Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5543:4751], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:20.777041Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5544:4752], server id = [2:5548:4756], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:20.777094Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5544:4752], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:20.777219Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5545:4753], server id = [2:5549:4757], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:20.777263Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5545:4753], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:20.778055Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5546:4754], server id = [2:5550:4758], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:20.778111Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5546:4754], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:20.782973Z node 2 
:STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:20.783457Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5543:4751], server id = [2:5547:4755], tablet id = 72075186224037899 2026-02-19T08:26:20.783502Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:20.785100Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:20.785987Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5545:4753], server id = [2:5549:4757], tablet id = 72075186224037901 2026-02-19T08:26:20.786020Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:20.786404Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:20.786682Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5544:4752], server id = [2:5548:4756], tablet id = 72075186224037900 2026-02-19T08:26:20.786709Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:20.787003Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:20.787048Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:20.787189Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:20.787316Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:20.787682Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5563:4767], ActorId: [2:5564:4768], Starting query actor #1 [2:5565:4769] 2026-02-19T08:26:20.787750Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5564:4768], ActorId: [2:5565:4769], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:20.789921Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5546:4754], server id = [2:5550:4758], tablet id = 72075186224037902 2026-02-19T08:26:20.789954Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:20.790686Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5564:4768], ActorId: [2:5565:4769], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTJhOWFlZTgtZWNlNTU1MzYtNGQ4OGZkNTctMjI2NmI5YjA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:20.894930Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5574:4778]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:20.895284Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:20.895330Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5574:4778], StatRequests.size() = 1 2026-02-19T08:26:21.038496Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5564:4768], ActorId: [2:5565:4769], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTJhOWFlZTgtZWNlNTU1MzYtNGQ4OGZkNTctMjI2NmI5YjA=, TxId: 2026-02-19T08:26:21.038566Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5564:4768], ActorId: [2:5565:4769], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTJhOWFlZTgtZWNlNTU1MzYtNGQ4OGZkNTctMjI2NmI5YjA=, TxId: 2026-02-19T08:26:21.039006Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5563:4767], ActorId: [2:5564:4768], Got response [2:5565:4769] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:26:21.039426Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5587:4784]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:21.039640Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:21.040185Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:21.040240Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:21.040898Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:21.040941Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:21.041004Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:21.044657Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpBatchUpdate::TableWithSyncIndex+EnableIndexStreamWrite >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] >> KqpBatchPEA::PrepareState_UnknownEvent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.230319Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.334533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.339804Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.340114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.340263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:41.716096Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:41.802598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:41.802725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:41.861768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:41.966844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:42.588607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:42.589147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:42.589193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:42.589225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:42.589798Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:42.653928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.202871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:46.129196Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.136047Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:46.139372Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.163506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:46.163623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.213055Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:46.214893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.384356Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.384481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.386194Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.387006Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.387769Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.388350Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.388563Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.388634Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.388787Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.388897Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.389021Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.403554Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.585537Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.630967Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:46.631054Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:46.665293Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:46.665507Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:46.665673Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:46.665747Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:46.665792Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:46.665835Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:46.665882Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:46.665934Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:46.666511Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:46.670966Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.671087Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.695338Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:46.696559Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:46.754667Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:46.757257Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:46.767528Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:46.767620Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:46.767716Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:46.773233Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:46.776415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:46.782524Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:46.782641Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:46.793807Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:46.980337Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:47.052676Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:47.323073Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:47.423033Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:47.423106Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:48.045338Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... [2:5465:4717], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:19.130565Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:19.130685Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:19.130747Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:5462:4714], StatRequests.size() = 1 2026-02-19T08:26:19.130805Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:26:19.242671Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5452:4704], ActorId: [2:5453:4705], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDc2OWI1MjUtYmRlZjJlYjktOGY2NTkwMzUtMzY3OTAyNjQ=, TxId: 2026-02-19T08:26:19.242760Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5452:4704], ActorId: [2:5453:4705], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDc2OWI1MjUtYmRlZjJlYjktOGY2NTkwMzUtMzY3OTAyNjQ=, TxId: 2026-02-19T08:26:19.242979Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5451:4703], ActorId: [2:5452:4704], Got response [2:5453:4705] SUCCESS 2026-02-19T08:26:19.243314Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:19.257024Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-02-19T08:26:19.257103Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-02-19T08:26:19.336470Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:26:19.336543Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:26:19.413199Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:5464:4716], schemeshard count = 1 2026-02-19T08:26:20.434021Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:711: [72075186224037894] ScheduleNextBackgroundTraversal 2026-02-19T08:26:20.434096Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:842: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-02-19T08:26:20.434136Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:754: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:20.439126Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:20.470242Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:20.470934Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:20.471019Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:20.472421Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2026-02-19T08:26:20.472487Z node 2 :STATISTICS WARN: tx_response_tablet_distribution.cpp:63: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2026-02-19T08:26:20.472548Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:21.521785Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-02-19T08:26:21.536378Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:21.536604Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-02-19T08:26:21.538033Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5574:4770], server id = [2:5578:4774], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:21.538470Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5574:4770], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:21.539453Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5575:4771], server id = [2:5579:4775], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:21.539515Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5575:4771], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:21.539862Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5576:4772], server id = [2:5580:4776], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:21.539904Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5576:4772], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:21.539995Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5577:4773], server id = [2:5581:4777], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:21.540025Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5577:4773], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:21.546586Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:21.547383Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5574:4770], server id = [2:5578:4774], tablet id = 72075186224037899 2026-02-19T08:26:21.547442Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:21.548036Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:21.548527Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5575:4771], server id = [2:5579:4775], tablet id = 72075186224037900 2026-02-19T08:26:21.548565Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:21.550183Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:21.550502Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5577:4773], server id = [2:5581:4777], tablet id = 72075186224037902 2026-02-19T08:26:21.550527Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:21.550863Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:21.550907Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:21.551048Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-02-19T08:26:21.551219Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:21.551591Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5594:4786], ActorId: [2:5595:4787], Starting query actor #1 [2:5596:4788] 2026-02-19T08:26:21.551642Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5595:4787], ActorId: [2:5596:4788], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:21.553489Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5576:4772], server id = [2:5580:4776], tablet id = 72075186224037901 2026-02-19T08:26:21.553513Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:21.554178Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5595:4787], ActorId: [2:5596:4788], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YzFmMTJhMzctNTlmMDJhMTEtMjYyYzBhOGYtYzE5ODU3NzE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:21.647564Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5605:4797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:21.647857Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:21.647914Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5605:4797], StatRequests.size() = 1 2026-02-19T08:26:21.802728Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5595:4787], ActorId: [2:5596:4788], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzFmMTJhMzctNTlmMDJhMTEtMjYyYzBhOGYtYzE5ODU3NzE=, TxId: 2026-02-19T08:26:21.802810Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5595:4787], ActorId: [2:5596:4788], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzFmMTJhMzctNTlmMDJhMTEtMjYyYzBhOGYtYzE5ODU3NzE=, TxId: 2026-02-19T08:26:21.803199Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5594:4786], ActorId: [2:5595:4787], Got response [2:5596:4788] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:26:21.803667Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5618:4803]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:21.803929Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:21.804538Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:21.804597Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:21.804941Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:21.804984Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:21.805047Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:21.811425Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpBatchDelete::SimplePartitions >> KqpBatchUpdate::HasTxControl >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> KqpBatchUpdate::TableWithSyncIndex-EnableIndexStreamWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.584168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.695591Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.703669Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.704036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.704231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.056709Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.152793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.152924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.210707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.318192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:42.943855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:42.944308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:42.944351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:42.944376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:42.944833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.007736Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.551302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:46.855123Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.864031Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:46.868585Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.900049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:46.900175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.949287Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:46.951290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.116630Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:47.116711Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.117952Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.118506Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.119045Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.119654Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.119872Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.119932Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.120107Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.120213Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.120388Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.134959Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.315386Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:47.361780Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:47.361852Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:47.392622Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:47.392855Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:47.393096Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:47.393195Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:47.393265Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:47.393326Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:47.393404Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:47.393466Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:47.393993Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:47.399500Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.399622Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.424603Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2180:2627] 2026-02-19T08:25:47.425461Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2180:2627], schemeshard id = 72075186224037897 2026-02-19T08:25:47.497896Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2265:2669] 2026-02-19T08:25:47.500099Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:47.512866Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.512932Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:25:47.513030Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:47.519725Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.524047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.538253Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.538407Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.599865Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:47.794708Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:47.877464Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:48.121469Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:48.239445Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:48.239530Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:25:48.953240Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... oaded traversal table owner id: 72075186224037897 2026-02-19T08:26:22.818781Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-02-19T08:26:22.818820Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1771489582687730 2026-02-19T08:26:22.818859Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-02-19T08:26:22.818898Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-02-19T08:26:22.818937Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-02-19T08:26:22.819030Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-02-19T08:26:22.819109Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:22.819202Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-02-19T08:26:22.819272Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:22.819332Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:22.819392Z node 2 :STATISTICS DEBUG: tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:22.819539Z node 2 :STATISTICS DEBUG: tx_init.cpp:300: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:22.820763Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:22.821188Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:22.821265Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:22.821369Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5597:4795] Owner: [2:5596:4794]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:22.821427Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. 
SelfId: [2:5597:4795] Owner: [2:5596:4794]. Column diff is empty, finishing 2026-02-19T08:26:22.823010Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:22.823089Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:22.824903Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:22.842871Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5606:4802] 2026-02-19T08:26:22.843202Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5606:4802], schemeshard id = 72075186224037897 2026-02-19T08:26:22.843316Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5563:4775], server id = [2:5607:4803], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:22.843438Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5607:4803] 2026-02-19T08:26:22.843532Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5607:4803], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-02-19T08:26:22.925978Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:22.926128Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-02-19T08:26:22.926855Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5612:4808], server id = [2:5616:4812], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:22.926973Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5612:4808], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:22.927472Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5613:4809], server id = [2:5617:4813], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:22.927532Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5613:4809], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:22.928048Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5614:4810], server id = [2:5618:4814], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:22.928107Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5614:4810], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:22.929175Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5615:4811], server id = [2:5619:4815], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:22.929220Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5615:4811], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:22.930275Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:22.930733Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5612:4808], server id = [2:5616:4812], tablet id = 72075186224037899 
2026-02-19T08:26:22.930772Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:22.931194Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:22.931474Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:22.931652Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5613:4809], server id = [2:5617:4813], tablet id = 72075186224037900 2026-02-19T08:26:22.931681Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:22.931909Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:22.931955Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:22.932177Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5614:4810], server id = [2:5618:4814], tablet id = 72075186224037901 2026-02-19T08:26:22.932205Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:22.932317Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:22.932453Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:22.932685Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5624:4820], ActorId: [2:5625:4821], Starting query actor #1 [2:5626:4822] 2026-02-19T08:26:22.932747Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5625:4821], ActorId: [2:5626:4822], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:22.935869Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5615:4811], server id = [2:5619:4815], tablet id = 72075186224037902 2026-02-19T08:26:22.935901Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:22.936568Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5625:4821], ActorId: [2:5626:4822], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWE3MmE5N2EtYzZmMmFiNmEtZjUwYTM0ODgtYjg2NWZmZDU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:23.023958Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5635:4831]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:23.024205Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:23.024244Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5635:4831], StatRequests.size() = 1 2026-02-19T08:26:23.130981Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5625:4821], ActorId: [2:5626:4822], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWE3MmE5N2EtYzZmMmFiNmEtZjUwYTM0ODgtYjg2NWZmZDU=, TxId: 2026-02-19T08:26:23.131068Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5625:4821], ActorId: [2:5626:4822], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWE3MmE5N2EtYzZmMmFiNmEtZjUwYTM0ODgtYjg2NWZmZDU=, TxId: 2026-02-19T08:26:23.131534Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5624:4820], ActorId: [2:5625:4821], Got response [2:5626:4822] SUCCESS 2026-02-19T08:26:23.131961Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:23.147129Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:23.147202Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-02-19T08:26:23.215646Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5654:4839]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:23.216023Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:23.216087Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:23.216380Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:23.216424Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:23.216481Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:23.219901Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> AnalyzeDatashard::DropTableNavigateError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:42.231171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:42.313058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:42.317840Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:42.318093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:42.318249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.643866Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.729864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.729958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.787587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.908090Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.493910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.493955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.493976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.495837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.496171Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.558663Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:44.098570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:47.204979Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:47.210389Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:47.213053Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:47.236771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:47.236868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.274817Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:47.276581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.453309Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:47.453438Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.455144Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.456039Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.456853Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.457391Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.457651Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.457836Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.458095Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.458287Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.458392Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.474296Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.684515Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:47.738271Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:47.738371Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:47.783042Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:47.783267Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:47.783498Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:47.783563Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:47.783617Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:47.783672Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:47.783763Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:47.783831Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:47.784356Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:47.790136Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.790293Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2137:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.817398Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2185:2629] 2026-02-19T08:25:47.818721Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2185:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:47.881678Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2266:2670] 2026-02-19T08:25:47.884888Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:47.896219Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.896291Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:25:47.896382Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:47.902353Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.906608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.914508Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.914645Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.928211Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:48.154148Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:48.173816Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:48.478464Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:48.611365Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:48.611435Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:25:49.311036Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... rver id = [2:5554:4767], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:23.293305Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5550:4763], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:23.298577Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:23.298979Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5547:4760], server id = [2:5551:4764], tablet id = 72075186224037899 2026-02-19T08:26:23.299023Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:23.299635Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:23.300120Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5548:4761], server id = [2:5552:4765], tablet id = 72075186224037900 2026-02-19T08:26:23.300151Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:23.300555Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:23.301013Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5549:4762], server id = [2:5553:4766], tablet id = 72075186224037901 2026-02-19T08:26:23.301043Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:23.301419Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:23.301453Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:23.301570Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:23.301692Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:23.302072Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5567:4776], ActorId: [2:5568:4777], Starting query actor #1 [2:5569:4778] 2026-02-19T08:26:23.302159Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: 
[TQueryBase] OwnerId: [2:5568:4777], ActorId: [2:5569:4778], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:23.304633Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5550:4763], server id = [2:5554:4767], tablet id = 72075186224037902 2026-02-19T08:26:23.304666Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:23.305445Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5568:4777], ActorId: [2:5569:4778], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YjRjNzlhMjktNDNiM2Y0OTItYmMxYmRlOGUtYjgzODYyNmY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:23.405003Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5578:4787]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:23.405335Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:23.405378Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5578:4787], StatRequests.size() = 1 ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to KQP_TABLE_RESOLVER cookie 0 ... 
waiting for 3rd TEvResolveKeySetResult (done) 2026-02-19T08:26:23.502793Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5466:4716], server id = [2:5467:4717], tablet id = 72075186224037894 2026-02-19T08:26:23.502898Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:5584:4790] 2026-02-19T08:26:23.502945Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:5584:4790] 2026-02-19T08:26:23.503097Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:5585:4791], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:23.564742Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:26:23.564849Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:26:23.565243Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:26:23.565974Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:26:23.566209Z node 2 :STATISTICS DEBUG: tx_init.cpp:55: [72075186224037894] Loaded database: /Root/Database 2026-02-19T08:26:23.566245Z node 2 :STATISTICS DEBUG: tx_init.cpp:59: [72075186224037894] Loaded traversal start key 2026-02-19T08:26:23.566274Z node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2026-02-19T08:26:23.566304Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-02-19T08:26:23.566338Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1771489583224588 2026-02-19T08:26:23.566368Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-02-19T08:26:23.566394Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-02-19T08:26:23.566417Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-02-19T08:26:23.566491Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-02-19T08:26:23.566548Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:23.566636Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-02-19T08:26:23.566694Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:23.566734Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:23.566774Z node 2 :STATISTICS DEBUG: tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:23.566874Z node 2 :STATISTICS DEBUG: tx_init.cpp:300: [72075186224037894] TTxInit::Complete. Start navigate. 
PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:23.567732Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:23.569267Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:23.569355Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:23.569465Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5619:4811] Owner: [2:5618:4810]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:23.569513Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5619:4811] Owner: [2:5618:4810]. Column diff is empty, finishing ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR cookie 0 2026-02-19T08:26:23.588746Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5628:4818] 2026-02-19T08:26:23.588891Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5584:4790], server id = [2:5628:4818], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:23.589128Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5628:4818], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-02-19T08:26:23.589353Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5629:4819] 2026-02-19T08:26:23.589445Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5629:4819], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to KQP_TABLE_RESOLVER ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2026-02-19T08:26:23.751552Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:23.751662Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:23.755473Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:23.777076Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5568:4777], ActorId: [2:5569:4778], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjRjNzlhMjktNDNiM2Y0OTItYmMxYmRlOGUtYjgzODYyNmY=, TxId: 2026-02-19T08:26:23.777167Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5568:4777], ActorId: [2:5569:4778], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjRjNzlhMjktNDNiM2Y0OTItYmMxYmRlOGUtYjgzODYyNmY=, TxId: 2026-02-19T08:26:23.777614Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5567:4776], ActorId: [2:5568:4777], Got response [2:5569:4778] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:26:23.778150Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5644:4825]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:23.778513Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:23.778567Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:23.779228Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:23.779267Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:23.779317Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:23.782978Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpBatchUpdate::TableWithUniqueSyncIndex-EnableIndexStreamWrite >> KqpBatchUpdate::TableWithVectorIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:45.833863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:45.944805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:45.950180Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:45.950480Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:45.950628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:46.336050Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.428438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.428575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.486790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.603838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:47.245167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:47.245228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:47.245262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:47.248209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:47.248651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:47.314830Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:47.880280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:50.610377Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:50.616165Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:50.619540Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:50.651039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:50.651167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:50.690225Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:50.692035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:50.873133Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:50.873236Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:50.874520Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.875143Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.875718Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.876099Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.876289Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.876374Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.876571Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.876682Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.876747Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.891428Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:51.078776Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:51.130034Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:51.130116Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:51.161413Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:51.161606Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:51.161804Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:51.161863Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:51.161924Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:51.161974Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:51.162022Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:51.162075Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:51.162502Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:51.166856Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:51.166948Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2137:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:51.187876Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2185:2629] 2026-02-19T08:25:51.188903Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2185:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:51.247149Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2266:2670] 2026-02-19T08:25:51.250576Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-02-19T08:25:51.261769Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:51.261837Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:25:51.261928Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-02-19T08:25:51.268111Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:51.272610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:51.280626Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:51.280767Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:51.294684Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:51.498633Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-02-19T08:25:51.539559Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:51.823206Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:51.945650Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:51.945743Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:25:52.541375Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... vice_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:21.880135Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:6184:5116], StatRequests.size() = 1 2026-02-19T08:26:21.880191Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:26:22.002325Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6174:5106], ActorId: [2:6175:5107], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmMzMTlhZWUtMjQ0ZTI2ODMtZDM0MWE4NWUtZTBlZjJlNDU=, TxId: 2026-02-19T08:26:22.002382Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6174:5106], ActorId: [2:6175:5107], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmMzMTlhZWUtMjQ0ZTI2ODMtZDM0MWE4NWUtZTBlZjJlNDU=, TxId: 2026-02-19T08:26:22.002693Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6173:5105], ActorId: [2:6174:5106], Got response [2:6175:5107] SUCCESS 2026-02-19T08:26:22.002963Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:22.016999Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-02-19T08:26:22.017050Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-02-19T08:26:22.197408Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:26:22.197496Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:26:22.280364Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:6186:5118], schemeshard count = 1 2026-02-19T08:26:23.211804Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:711: [72075186224037894] ScheduleNextBackgroundTraversal 2026-02-19T08:26:23.776021Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8677: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-02-19T08:26:23.776097Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 5.424000s, at schemeshard: 72075186224037899 2026-02-19T08:26:23.776417Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-02-19T08:26:23.790924Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-02-19T08:26:24.303102Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:711: [72075186224037894] ScheduleNextBackgroundTraversal 2026-02-19T08:26:24.303161Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:842: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 32] is column table. 2026-02-19T08:26:24.303190Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:754: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-02-19T08:26:24.306714Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:24.324612Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:24.325248Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:24.325330Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:24.326731Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-02-19T08:26:24.341189Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:24.341525Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-02-19T08:26:24.342839Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:6302:5180], server id = [2:6306:5184], tablet id = 72075186224037905, status = OK 2026-02-19T08:26:24.343094Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:6302:5180], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-02-19T08:26:24.343315Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:6303:5181], server id = [2:6307:5185], tablet id = 72075186224037906, status = OK 2026-02-19T08:26:24.343352Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:6303:5181], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-02-19T08:26:24.344057Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:6304:5182], server id = [2:6308:5186], tablet id = 72075186224037907, status = OK 2026-02-19T08:26:24.344105Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:6304:5182], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-02-19T08:26:24.345022Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:6305:5183], server id = [2:6309:5187], tablet id = 72075186224037908, status = OK 2026-02-19T08:26:24.345061Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:6305:5183], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-02-19T08:26:24.348403Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037905 2026-02-19T08:26:24.348847Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:6302:5180], server id = [2:6306:5184], tablet id = 72075186224037905 2026-02-19T08:26:24.348879Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:24.349600Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037906 2026-02-19T08:26:24.349949Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:6303:5181], server id = [2:6307:5185], tablet id = 72075186224037906 2026-02-19T08:26:24.349972Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:24.350816Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037907 2026-02-19T08:26:24.351096Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:6304:5182], server id = [2:6308:5186], tablet id = 72075186224037907 2026-02-19T08:26:24.351127Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:24.351361Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037908 2026-02-19T08:26:24.351400Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:24.351530Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-02-19T08:26:24.351658Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:24.352066Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6322:5196], ActorId: [2:6323:5197], Starting query actor #1 [2:6324:5198] 2026-02-19T08:26:24.352129Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6323:5197], ActorId: [2:6324:5198], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-02-19T08:26:24.353738Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:6305:5183], server id = [2:6309:5187], tablet id = 72075186224037908 2026-02-19T08:26:24.353770Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:24.354377Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6323:5197], ActorId: [2:6324:5198], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZjJkZGMxMzktY2FmMTcwNGUtY2ZmNzJiLWYyOGUyM2Iy, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:24.444969Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6333:5207]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:24.445191Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:24.445236Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:6333:5207], StatRequests.size() = 1 2026-02-19T08:26:24.550963Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6323:5197], ActorId: [2:6324:5198], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjJkZGMxMzktY2FmMTcwNGUtY2ZmNzJiLWYyOGUyM2Iy, TxId: 2026-02-19T08:26:24.551039Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6323:5197], ActorId: [2:6324:5198], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjJkZGMxMzktY2FmMTcwNGUtY2ZmNzJiLWYyOGUyM2Iy, TxId: 2026-02-19T08:26:24.551373Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6322:5196], ActorId: [2:6323:5197], Got response [2:6324:5198] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:26:24.551746Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6346:5213]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:24.551934Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:24.552382Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:24.552426Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:24.553089Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:24.553127Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ] 2026-02-19T08:26:24.553174Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:24.557928Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:45.815240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:45.930503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:45.936752Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:45.937078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:45.937235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:46.308448Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.403181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.403315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.460158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.508113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:47.204161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:47.204825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:47.204873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:47.204910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:47.205566Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:47.269682Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:47.824824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:50.368033Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:50.374153Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:50.376707Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:50.398021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:50.398107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:50.445952Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:50.448017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:50.603182Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:50.603258Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:50.604351Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.604915Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.605429Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.606026Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.606208Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.606302Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.606501Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.606596Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.606739Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.621485Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:50.795900Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:50.840229Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:50.840318Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:50.877613Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:50.877827Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:50.878029Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:50.878118Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:50.878191Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:50.878261Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:50.878326Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:50.878377Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:50.878888Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:50.882971Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:50.883051Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:50.903168Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2180:2627] 2026-02-19T08:25:50.903812Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2180:2627], schemeshard id = 72075186224037897 2026-02-19T08:25:50.969059Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2265:2669] 2026-02-19T08:25:50.970754Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:50.980593Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:50.980649Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:25:50.980717Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:50.985502Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:50.988646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:50.993886Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:50.993979Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:51.004125Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:51.233822Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:51.260298Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:51.550582Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:51.680490Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:51.680572Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:25:52.388341Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... : EvRequestTimeout, pipe client id = [2:5455:4706], schemeshard count = 1 2026-02-19T08:26:20.811026Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:711: [72075186224037894] ScheduleNextBackgroundTraversal 2026-02-19T08:26:20.811100Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:842: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-02-19T08:26:20.811143Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:754: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:20.816779Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:20.846366Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:20.846967Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:20.847053Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:20.848162Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-02-19T08:26:20.872959Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:20.873237Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-02-19T08:26:20.874144Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5536:4750], server id = [2:5540:4754], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:20.874587Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5536:4750], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:20.874888Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5537:4751], server id = [2:5541:4755], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:20.874946Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5537:4751], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:20.876477Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5538:4752], server id = [2:5543:4757], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:20.876540Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5538:4752], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:20.877117Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5539:4753], server id = [2:5542:4756], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:20.877176Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5539:4753], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:20.882621Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:20.883576Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5536:4750], server id = [2:5540:4754], tablet id = 72075186224037899 2026-02-19T08:26:20.883631Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:20.884095Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:20.884479Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5537:4751], server id = [2:5541:4755], tablet id = 72075186224037900 2026-02-19T08:26:20.884512Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:20.884656Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:20.885087Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5538:4752], server id = [2:5543:4757], tablet id = 72075186224037901 2026-02-19T08:26:20.885118Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:20.885208Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:20.885260Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:20.885400Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-02-19T08:26:20.885798Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5539:4753], server id = [2:5542:4756], tablet id = 72075186224037902 2026-02-19T08:26:20.885846Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:20.910491Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:20.910754Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-02-19T08:26:21.429985Z node 2 :STATISTICS DEBUG: service_impl.cpp:251: Event round 2 is different from the current 3 2026-02-19T08:26:21.430079Z node 2 :STATISTICS DEBUG: service_impl.cpp:378: Skip TEvDispatchKeepAlive 2026-02-19T08:26:24.131246Z node 2 :STATISTICS INFO: service_impl.cpp:415: Node 3 is unavailable 2026-02-19T08:26:24.131351Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:24.131543Z node 2 :STATISTICS DEBUG: service_impl.cpp:251: Event round 3 is different from the current 0 2026-02-19T08:26:24.131581Z node 2 :STATISTICS DEBUG: service_impl.cpp:378: Skip TEvDispatchKeepAlive 2026-02-19T08:26:24.131676Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:24.131742Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:24.132363Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:24.147294Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:24.147526Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2026-02-19T08:26:24.148215Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5658:4811], server id = [2:5659:4812], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:24.148313Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5658:4811], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:24.149602Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:24.149668Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:24.149852Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:24.150001Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:24.150466Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5661:4814], ActorId: [2:5662:4815], Starting query actor #1 [2:5663:4816] 2026-02-19T08:26:24.150536Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5662:4815], ActorId: [2:5663:4816], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:24.152979Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5658:4811], server id = [2:5659:4812], tablet id = 72075186224037900 2026-02-19T08:26:24.153016Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:24.153835Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5662:4815], ActorId: [2:5663:4816], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZmEyNjc0OTgtNWVhYTliOTItN2M2ODBmNDQtZGIxYjM2MWM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:24.266690Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5672:4825]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:24.267016Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:24.267071Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5672:4825], StatRequests.size() = 1 2026-02-19T08:26:24.432838Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5662:4815], ActorId: [2:5663:4816], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmEyNjc0OTgtNWVhYTliOTItN2M2ODBmNDQtZGIxYjM2MWM=, TxId: 2026-02-19T08:26:24.432912Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5662:4815], ActorId: [2:5663:4816], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmEyNjc0OTgtNWVhYTliOTItN2M2ODBmNDQtZGIxYjM2MWM=, TxId: 2026-02-19T08:26:24.433261Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5661:4814], ActorId: [2:5662:4815], Got response [2:5663:4816] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:26:24.433689Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5685:4831]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:24.433952Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:24.434470Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:24.434526Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:24.435192Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:24.435234Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:24.435291Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:24.440628Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 >> IncrementalBackup::ComplexBackupSequenceWithDataVerification [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:47.841177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:47.936828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:47.945020Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:47.945363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:47.945528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:48.328480Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:48.406896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:48.407008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:48.463882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:48.556891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:49.205025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:49.205526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:49.205564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:49.205594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:49.206180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:49.271196Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:49.799132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:52.462518Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:52.468495Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:52.471095Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:52.491738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:52.491823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:52.540029Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:52.541612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:52.696255Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:52.696338Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:52.697518Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.698104Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.698618Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.699234Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.699445Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.699530Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.699706Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.699831Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.699961Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.714089Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:52.900394Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:52.951306Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:52.951410Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:52.986127Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:52.986363Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:52.986586Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:52.986643Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:52.986695Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:52.986751Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:52.986830Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:52.986892Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:52.987416Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:52.992774Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:52.992872Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:53.013001Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:53.013853Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:53.079805Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2263:2669] 2026-02-19T08:25:53.082552Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:53.094619Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:53.094686Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:53.094767Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:53.101492Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:53.105686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:53.123088Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:53.123224Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:53.136552Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:53.199283Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:53.351349Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:53.636849Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:53.774431Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:53.774525Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:54.522329Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 5:56.151778Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:25:56.326487Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3181:3262] 2026-02-19T08:25:56.329474Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3179:3472] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-02-19T08:25:56.329566Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: "operationId", DatabaseName: `', Types: 1 2026-02-19T08:25:56.329616Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. 
Create new force traversal table, OperationId: "operationId", PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-02-19T08:25:56.426043Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:112: [72075186224037894] TTxAnalyze::Complete 2026-02-19T08:25:56.460231Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037899 not found 2026-02-19T08:25:56.461051Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037900 not found 2026-02-19T08:25:56.461318Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037902 not found 2026-02-19T08:25:56.461405Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037901 not found 2026-02-19T08:25:56.535416Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:25:56.535489Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:25:56.609860Z node 1 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [1:3083:3448], schemeshard count = 1 2026-02-19T08:25:59.990489Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-02-19T08:25:59.990833Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 1 2026-02-19T08:25:59.991082Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-02-19T08:26:02.842186Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-02-19T08:26:04.853900Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-02-19T08:26:04.854295Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 2 2026-02-19T08:26:04.854538Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 2 2026-02-19T08:26:07.705076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:26:07.705146Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:07.727596Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-02-19T08:26:09.756151Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:26:09.756206Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:09.823225Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-02-19T08:26:09.823604Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 3 2026-02-19T08:26:09.823861Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-02-19T08:26:12.751650Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-02-19T08:26:14.709890Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), 
node count = 1, schemeshard count = 1 2026-02-19T08:26:14.710290Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 4 2026-02-19T08:26:14.710459Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-02-19T08:26:17.713762Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-02-19T08:26:19.602431Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-02-19T08:26:19.602715Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 5 2026-02-19T08:26:19.602853Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-02-19T08:26:22.501636Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-02-19T08:26:22.602507Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-02-19T08:26:22.602573Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8557: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-02-19T08:26:22.602602Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8588: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-02-19T08:26:22.602643Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-02-19T08:26:24.838501Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-02-19T08:26:24.838868Z node 1 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 1 cookie: 6 2026-02-19T08:26:24.839128Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-02-19T08:26:24.850230Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8677: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-02-19T08:26:24.850315Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-02-19T08:26:24.850612Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-02-19T08:26:24.864561Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-02-19T08:26:24.886444Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:26:24.886548Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:678: [72075186224037894] ScheduleNextAnalyze. table [OwnerId: 72075186224037897, LocalPathId: 34] was deleted, deleting its statistics 2026-02-19T08:26:24.886971Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4082:3603], ActorId: [2:4083:3604], Starting query actor #1 [2:4084:3605] 2026-02-19T08:26:24.887046Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4083:3604], ActorId: [2:4084:3605], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:24.890701Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4083:3604], ActorId: [2:4084:3605], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZTY0MzA1YjEtNDNmMzJiNTAtZDM3YTZkMWMtN2YzMzc2MjQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2026-02-19T08:26:24.953031Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4093:3614]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:24.953328Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:26:24.953441Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:4095:3616] 2026-02-19T08:26:24.953502Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:4095:3616] 2026-02-19T08:26:24.954010Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4095:3616], server id = [2:4096:3617], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:24.954084Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4096:3617] 2026-02-19T08:26:24.954181Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4096:3617], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:24.954242Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:24.954381Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:24.954456Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4093:3614], StatRequests.size() = 1 2026-02-19T08:26:24.954563Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:26:25.062256Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4083:3604], ActorId: [2:4084:3605], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTY0MzA1YjEtNDNmMzJiNTAtZDM3YTZkMWMtN2YzMzc2MjQ=, TxId: 2026-02-19T08:26:25.062334Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4083:3604], ActorId: [2:4084:3605], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTY0MzA1YjEtNDNmMzJiNTAtZDM3YTZkMWMtN2YzMzc2MjQ=, TxId: 2026-02-19T08:26:25.062611Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4082:3603], ActorId: [2:4083:3604], Got response [2:4084:3605] SUCCESS 2026-02-19T08:26:25.062825Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:25.076571Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:25.076663Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3179:3472] 2026-02-19T08:26:25.077572Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4115:3628]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:26:25.080243Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:25.080296Z node 2 :STATISTICS ERROR: service_impl.cpp:731: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2026-02-19T08:26:25.080335Z node 2 :STATISTICS DEBUG: service_impl.cpp:1268: ReplyFailed(), request id = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithVectorIndex [GOOD] Test command err: 2026-02-19T08:26:22.776519Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489821376382349:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:22.776584Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:22.985934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:22.986019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:23.020562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:23.023927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:23.067286Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489821376382318:2081] 1771489582774845 != 1771489582774848 2026-02-19T08:26:23.072337Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:23.111481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:23.111506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:23.111514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:23.111636Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:23.177308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:25.411242Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006a6/r3tmp/spilling-tmp-runner/node_1_38b15cd9-44fa7496-48ced055-7ce6c3a1, actor: [1:7608489834261285052:2309] 2026-02-19T08:26:25.411484Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006a6/r3tmp/spilling-tmp-runner >> KqpBatchPEA::AbortState_DoubleAbort 
>> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates >> TKesusTest::TestAttachTimeoutTooBig >> TKesusTest::TestAcquireUpgrade >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestKesusConfig >> TxUsage::WriteToTopic_Demo_38_Query [GOOD] >> TKesusTest::TestCreateSemaphore [GOOD] >> TKesusTest::TestDeleteSemaphore |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> KqpBatchUpdate::TableWithSyncIndex+EnableIndexStreamWrite [GOOD] >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query [GOOD] >> KqpBatchPEA::PrepareState_UnknownEvent [GOOD] >> KqpBatchUpdate::ColumnTable >> TxUsage::WriteToTopic_Demo_40_Query >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> KqpBatchUpdate::HasTxControl [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail [GOOD] >> KqpLimits::ManyPartitionsSortingLimit >> KqpBatchUpdate::TableWithUniqueSyncIndex-EnableIndexStreamWrite [GOOD] >> KqpBatchDelete::SimpleOnePartition [GOOD] >> KqpBatchDelete::Returning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithSyncIndex+EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:26:23.986903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489826031536806:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:23.986980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:24.188905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:24.207546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:24.207643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:24.270760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:24.273199Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:24.274789Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489826031536775:2081] 1771489583985124 != 1771489583985127 2026-02-19T08:26:24.377413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:24.377439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:24.377447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:24.377563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:24.389428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:26.644682Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006a4/r3tmp/spilling-tmp-runner/node_1_4df172be-556af533-385e145c-2b9596eb, actor: [1:7608489838916439509:2311] 2026-02-19T08:26:26.644982Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006a4/r3tmp/spilling-tmp-runner 2026-02-19T08:26:27.360757Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.360824Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:27.361007Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:26:27.361044Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:27.361225Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 2 2026-02-19T08:26:27.361348Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.361443Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindSyncIndexTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL, Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.361616Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"v1" (Just (String '"123")))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct '('"k" (Int32 '1)) $1 $2 $3)) (let $5 (AsStruct '('"k" (Int32 '"2")) $1 $2 $3)) (let $6 (AsStruct '('"k" (Int32 '"3")) $1 $2 $3)) (let $7 (AsStruct '('"k" (Int32 '"4")) $1 $2 $3)) (let $8 (AsStruct '('"k" (Int32 '"5")) $1 $2 $3)) (return (Iterator (AsList $4 $5 $6 $7 $8))) )))) ) 2026-02-19T08:26:27.361640Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:26:27.361879Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:26:27.362266Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489843211407131:2326] 2026-02-19T08:26:27.362294Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489843211407131:2326], channels: 0 2026-02-19T08:26:27.362387Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:27.362421Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:27.362435Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710661. 
Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:7608489843211407131:2326] 2026-02-19T08:26:27.362450Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489843211407131:2326], channels: 0 2026-02-19T08:26:27.362500Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489843211407131:2326], trace_id# 2026-02-19T08:26:27.362533Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489843211407131:2326], trace_id# 2026-02-19T08:26:27.362555Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:27.373849Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489843211407131:2326] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:27.373929Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489843211407131:2326], trace_id# 2026-02-19T08:26:27.373966Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489843211407131:2326], trace_id# 2026-02-19T08:26:27.375589Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489843211407128:2326] TxId: 281474976710661. 
Ctx: { TraceId: 01khtg3vzqd40qckydsm4mcprr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeAct ... 0Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489851801342048:2445], channels: 1 2026-02-19T08:26:29.622079Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489851801342049:2446], channels: 2 2026-02-19T08:26:29.622123Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:29.622137Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:29.622151Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [1:7608489851801342049:2446] 2026-02-19T08:26:29.622172Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [1:7608489851801342048:2445] 2026-02-19T08:26:29.622192Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489851801342048:2445], channels: 1 2026-02-19T08:26:29.622212Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489851801342049:2446], channels: 2 2026-02-19T08:26:29.622248Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. 
Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489851801342048:2445], CA [1:7608489851801342049:2446], trace_id# 2026-02-19T08:26:29.622275Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489851801342048:2445], CA [1:7608489851801342049:2446], trace_id# 2026-02-19T08:26:29.622296Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:29.623662Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489851801342048:2445] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:29.623696Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489851801342048:2445], CA [1:7608489851801342049:2446], trace_id# 2026-02-19T08:26:29.623724Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489851801342048:2445], CA [1:7608489851801342049:2446], trace_id# 2026-02-19T08:26:29.623758Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489851801342049:2446] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:29.623778Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [1:7608489851801342048:2445], CA [1:7608489851801342049:2446], trace_id# 2026-02-19T08:26:29.623800Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489851801342048:2445], CA [1:7608489851801342049:2446], trace_id# 2026-02-19T08:26:29.624548Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489851801342048:2445] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1494 Tasks { TaskId: 1 CpuTimeUs: 803 FinishTimeMs: 1771489589624 OutputBytes: 1 Tables { TablePath: "/Root/global_sync_idx/idx/indexImplTable" ReadRows: 5 ReadBytes: 40 AffectedPartitions: 1 } IngressRows: 5 ComputeCpuTimeUs: 111 BuildCpuTimeUs: 692 WaitOutputTimeUs: 22 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489589624 CreateTimeMs: 1771489589622 UpdateTimeMs: 1771489589624 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:29.624600Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489851801342048:2445] 2026-02-19T08:26:29.624633Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489851801342049:2446], trace_id# 2026-02-19T08:26:29.624658Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489851801342049:2446], trace_id# 2026-02-19T08:26:29.624760Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489851801342049:2446] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 786 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 195 FinishTimeMs: 1771489589624 InputBytes: 1 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 85 BuildCpuTimeUs: 110 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489589624 CreateTimeMs: 1771489589623 UpdateTimeMs: 1771489589624 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:29.624776Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489851801342049:2446] 2026-02-19T08:26:29.624876Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:29.624898Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:29.624924Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489851801342044:2326] TxId: 281474976710673. Ctx: { TraceId: 01khtg3y036vpesrbrbzph2tdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzMDIzZS01YjBhZDBhMy02OWFlNWMzZC0xMTFjMjBmZg==, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002280s ReadRows: 5 ReadBytes: 40 RequestUnits# 5 ForceFlag# true trace_id# >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> TKesusTest::TestRegisterProxy >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl [GOOD] Test command err: 2026-02-19T08:26:24.835210Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489830088942760:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:24.836519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:25.033524Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:25.060553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:25.060653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:25.119009Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:25.120362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:25.120717Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489830088942728:2081] 1771489584832679 != 1771489584832682 2026-02-19T08:26:25.206033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:25.206053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:25.206058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:25.206118Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:25.309676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:27.829310Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006a1/r3tmp/spilling-tmp-runner/node_1_f4696097-6a97db8f-2923d016-8fb1ad29, actor: [1:7608489842973846448:2384] 2026-02-19T08:26:27.829500Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006a1/r3tmp/spilling-tmp-runner 2026-02-19T08:26:29.922561Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922608Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 
2026-02-19T08:26:29.922630Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922647Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922663Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922691Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922721Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922745Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922768Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922793Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922816Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.922859Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:29.923088Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:26:29.923125Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:29.923430Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715672. Resolved key sets: 11 2026-02-19T08:26:29.923594Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.923652Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.923710Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.923760Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.923829Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.923880Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.923931Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.923984Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.924045Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.924112Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... 2-19T08:26:29.943604Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489851563782130:2529] 2026-02-19T08:26:29.943632Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489851563782134:2533], CA [1:7608489851563782131:2530], CA [1:7608489851563782132:2531], CA [1:7608489851563782133:2532], trace_id# 2026-02-19T08:26:29.943653Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. 
Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489851563782134:2533], CA [1:7608489851563782131:2530], CA [1:7608489851563782132:2531], CA [1:7608489851563782133:2532], trace_id# 2026-02-19T08:26:29.943876Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489851563782131:2530] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1437 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 460 FinishTimeMs: 1771489589940 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 450 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489589932 UpdateTimeMs: 1771489589940 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:26:29.943898Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489851563782131:2530] 2026-02-19T08:26:29.943928Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489851563782134:2533], CA [1:7608489851563782132:2531], CA [1:7608489851563782133:2532], trace_id# 2026-02-19T08:26:29.943948Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489851563782134:2533], CA [1:7608489851563782132:2531], CA [1:7608489851563782133:2532], trace_id# 2026-02-19T08:26:29.944153Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489851563782132:2531] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1138 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 799 FinishTimeMs: 1771489589940 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 787 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489589933 UpdateTimeMs: 1771489589940 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:26:29.944178Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. 
Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489851563782132:2531] 2026-02-19T08:26:29.944200Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489851563782134:2533], CA [1:7608489851563782133:2532], trace_id# 2026-02-19T08:26:29.944220Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489851563782134:2533], CA [1:7608489851563782133:2532], trace_id# 2026-02-19T08:26:29.944490Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489851563782133:2532] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1192 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 870 FinishTimeMs: 1771489589940 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 11 BuildCpuTimeUs: 859 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489589934 UpdateTimeMs: 1771489589940 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:26:29.944512Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489851563782133:2532] 2026-02-19T08:26:29.944533Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489851563782134:2533], trace_id# 2026-02-19T08:26:29.944550Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489851563782134:2533], trace_id# 2026-02-19T08:26:29.944916Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489851563782134:2533] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1781 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1358 FinishTimeMs: 1771489589941 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 21 BuildCpuTimeUs: 1337 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489589936 UpdateTimeMs: 1771489589941 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:26:29.944939Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489851563782134:2533] 2026-02-19T08:26:29.944979Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:7608489851563782120:2401] trace_id# 2026-02-19T08:26:29.945021Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.011313s ReadRows: 0 ReadBytes: 0 RequestUnits# 7 ForceFlag# true trace_id# 2026-02-19T08:26:29.968329Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:29.968372Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489851563782121:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg3xa2189whx63yqwved3n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NWIwZGUtYzA5MDhlZTMtMzcwNmNhODctMjZkNGVhNTY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:30.145542Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3yqs3669n9kt3tsad44p, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzBmNzEwMjktMjZjMjZiZjctOTFkNDhjMDMtODg0YmI2OWM=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:30.145796Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [1:7608489855858749634:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg3yqs3669n9kt3tsad44p, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzBmNzEwMjktMjZjMjZiZjctOTFkNDhjMDMtODg0YmI2OWM=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [1:7608489855858749633:2536] trace_id# 2026-02-19T08:26:30.145984Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489855858749634:2536] TxId: 281474976715673. 
Ctx: { TraceId: 01khtg3yqs3669n9kt3tsad44p, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzBmNzEwMjktMjZjMjZiZjctOTFkNDhjMDMtODg0YmI2OWM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:30.146004Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489855858749634:2536] TxId: 281474976715673. Ctx: { TraceId: 01khtg3yqs3669n9kt3tsad44p, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzBmNzEwMjktMjZjMjZiZjctOTFkNDhjMDMtODg0YmI2OWM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithUniqueSyncIndex-EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:26:26.547138Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489840718368304:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:26.547701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:26.736149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:26.743936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:26.744069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:26.802808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:26.805641Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:26.807160Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489840718368274:2081] 1771489586545356 != 1771489586545359 2026-02-19T08:26:26.892725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:26.892753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:26.892761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:26.892920Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:27.024805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:29.318567Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/00069e/r3tmp/spilling-tmp-runner/node_1_18addd28-557bdcbc-d964535-ebf06618, actor: [1:7608489853603271012:2309] 2026-02-19T08:26:29.318795Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/00069e/r3tmp/spilling-tmp-runner 2026-02-19T08:26:30.502101Z node 1 :KQP_EXECUTER DEBUG: 
kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 4 2026-02-19T08:26:30.502179Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-02-19T08:26:30.502209Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:26:30.502274Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 '('"k" (Int32 '1))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct $1 '('"v1" (Just (String '"123"))) $2 $3)) (let $5 '('"k" (Int32 '2))) (let $6 (AsStruct $5 '('"v1" (Just (String '"124"))) $2 $3)) (let $7 '('"k" (Int32 '"3"))) (let $8 (AsStruct $7 '('"v1" (Just (String '"125"))) $2 $3)) (let $9 '('"k" (Int32 '"4"))) (let $10 (AsStruct $9 '('"v1" (Just (String '"126"))) $2 $3)) (let $11 '('"k" (Int32 '"5"))) (let $12 (AsStruct $11 '('"v1" (Just (String '"127"))) $2 $3)) (let $13 (AsList $4 $6 $8 $10 $12)) (let $14 '('"k" (DataType 'Int32))) (let $15 (OptionalType (DataType 'String))) (let $16 '('"v1" $15)) (let $17 (StructType $14)) (let $18 (VariantType (TupleType (ListType (StructType $14 $16 '('"v2" $15) '('"v3" $15))) (ListType (StructType $16)) (DataType 'Bool) (DictType $17 (VoidType))))) (let $19 (Variant $13 '0 $18)) (let $20 (OrderedFilter $13 (lambda '($27) (Exists (Member $27 '"v1"))))) (let $21 (ToDict $20 (lambda '($28) (AsStruct '('"v1" (Member $28 '"v1")))) (lambda '($29) (Void)) '('One '"Hashed"))) (let $22 (Variant (DictKeys $21) '1 $18)) (let $23 (Variant (== (Length $20) (Length $21)) '2 $18)) (let $24 (List (ListType $17) (AsStruct $1) (AsStruct $5) (AsStruct $7) (AsStruct $9) (AsStruct $11))) (let $25 (ToDict $24 (lambda '($30) $30) (lambda '($31) (Void)) '('One 'Auto))) (let $26 (Variant $25 '"3" $18)) (return (Iterator (AsList $19 $22 $23 $26))) )))) ) trace_id# 2026-02-19T08:26:30.502330Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 3 2026-02-19T08:26:30.502356Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 2 from task: 1 with index: 0 2026-02-19T08:26:30.502374Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 3 from task: 1 with index: 2 2026-02-19T08:26:30.502380Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 4 from task: 1 with index: 1 2026-02-19T08:26:30.506808Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete results_size# 4 trace_id# 2026-02-19T08:26:30.530714Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 4 2026-02-19T08:26:30.530791Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Begin literal execution operation_timeout# 7199.293785s cancel_after# trace_id# 2026-02-19T08:26:30.530834Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:26:30.530889Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 '('"k" (Int32 '1))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct $1 '('"v1" (Just (String '"123"))) $2 $3)) (let $5 '('"k" (Int32 '2))) (let $6 (AsStruct $5 '('"v1" (Just (String '"124"))) $2 $3)) (let $7 '('"k" (Int32 '"3"))) (let $8 (AsStruct $7 '('"v1" (Just (String '"125"))) $2 $3)) (let $9 '('"k" (Int32 '"4"))) (let $10 (AsStruct $9 '('"v1" (Just (String '"126"))) $2 $3)) (let $11 '('"k" (Int32 '"5"))) (let $12 (AsStruct $11 '('"v1" (Just (String '"127"))) $2 $3)) (let $13 (AsList $4 $6 $8 $10 $12)) (let $14 '('"k" (DataType 'Int32))) (let $15 (OptionalType (DataType 'String))) (let $16 '('"v1" $15)) (let $17 (StructType $14)) (let $18 (VariantType (TupleType (ListType (StructType $14 $16 '('"v2" $15) '('"v3" $15))) (ListType (StructType $16)) (DataType 'Bool) (DictType $17 (VoidType))))) (let $19 (Variant $13 '0 $18)) (let $20 (OrderedFilter $13 (lambda '($27) (Exists (Member $27 '"v1"))))) (let $21 (ToDict $20 (lambda '($28) (AsStruct '('"v1" (Member $28 '"v1")))) (lambda '($29) (Void)) '('One '"Hashed"))) (let $22 (Variant (DictKeys $21) '1 $18)) (let $23 (Variant (== (Length $20) (Length $21)) '2 $18)) (let $24 (List (ListType $17) (AsStruct $1) (AsStruct $5) (AsStruct $7) (AsStruct $9) (AsStruct $11))) (let $25 (ToDict $24 (lambda '($30) $30) (lambda '($31) (Void)) '('One 'Auto))) (let $26 (Variant $25 '"3" $18)) (return (Iterator (AsList $19 $22 $23 $26))) )))) ) trace_id# 2026-02-19T08:26:30.530967Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 3 2026-02-19T08:26:30.530980Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 2 from task: 1 with index: 0 2026-02-19T08:26:30.530988Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 3 from task: 1 with index: 2 2026-02-19T08:26:30.530994Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 4 from task: 1 with index: 1 2026-02-19T08:26:30.532097Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. 
Execution is complete results_size# 4 trace_id# 2026-02-19T08:26:30.532493Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:30.532524Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2026-02-19T08:26:30.532550Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 1, OutputsCount: 1 2026-02-19T08:26:30.532612Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:30.532772Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489857898238644:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 3 trace_id# 2026-02-19T08:26:30.532812Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489857898238644:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:30.532998Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 0 2026-02-19T08:26:30.533078Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_3 (ListType (StructType '('"v1" (OptionalType (DataType 'String)))))) (return (lambda '() (Iterator %kqp%tx_result_binding_0_3))) ) 2026-02-19T08:26:30.533106Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:26:30.533123Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,1] AST: ( (declare %kqp%tx_result_binding_0_0 (DictType (StructType '('"k" (DataType 'Int32))) (VoidType))) (return (lambda '($1) (Map (Filter (Take $1 (Uint64 '1)) (lambda '($2) (Not (Contains %kqp%tx_re ... tabase: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489857898238670:2371] 2026-02-19T08:26:30.560599Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489857898238672:2373], CA [1:7608489857898238669:2370], CA [1:7608489857898238674:2374], CA [1:7608489857898238671:2372], trace_id# 2026-02-19T08:26:30.560617Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489857898238672:2373], CA [1:7608489857898238669:2370], CA [1:7608489857898238674:2374], CA [1:7608489857898238671:2372], trace_id# 2026-02-19T08:26:30.560664Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489857898238674:2374] TaskId# 5 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:30.560688Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489857898238672:2373], CA [1:7608489857898238669:2370], CA [1:7608489857898238674:2374], CA [1:7608489857898238671:2372], trace_id# 2026-02-19T08:26:30.560703Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [1:7608489857898238672:2373], CA [1:7608489857898238669:2370], CA [1:7608489857898238674:2374], CA [1:7608489857898238671:2372], trace_id# 2026-02-19T08:26:30.560769Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489857898238669:2370] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1113 Tasks { TaskId: 1 CpuTimeUs: 473 FinishTimeMs: 1771489590559 EgressBytes: 105 EgressRows: 5 ComputeCpuTimeUs: 33 BuildCpuTimeUs: 440 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489590555 UpdateTimeMs: 1771489590560 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:30.560780Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489857898238669:2370] 2026-02-19T08:26:30.560800Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489857898238672:2373], CA [1:7608489857898238674:2374], CA [1:7608489857898238671:2372], trace_id# 2026-02-19T08:26:30.560820Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. 
Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [1:7608489857898238672:2373], CA [1:7608489857898238674:2374], CA [1:7608489857898238671:2372], trace_id# 2026-02-19T08:26:30.561260Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489857898238671:2372] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1471 DurationUs: 2000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 1175 FinishTimeMs: 1771489590560 InputBytes: 1 ComputeCpuTimeUs: 19 BuildCpuTimeUs: 1156 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489590558 CreateTimeMs: 1771489590557 UpdateTimeMs: 1771489590560 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:30.561300Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489857898238671:2372] 2026-02-19T08:26:30.561325Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489857898238672:2373], CA [1:7608489857898238674:2374], trace_id# 2026-02-19T08:26:30.561339Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489857898238672:2373], CA [1:7608489857898238674:2374], trace_id# 2026-02-19T08:26:30.561576Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489857898238672:2373] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 3866 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 1651 FinishTimeMs: 1771489590561 OutputRows: 5 OutputBytes: 52 ComputeCpuTimeUs: 107 BuildCpuTimeUs: 1544 WaitOutputTimeUs: 19 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489590558 UpdateTimeMs: 1771489590561 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:30.561594Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. 
Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489857898238672:2373] 2026-02-19T08:26:30.561610Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489857898238674:2374], trace_id# 2026-02-19T08:26:30.561631Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489857898238674:2374], trace_id# 2026-02-19T08:26:30.561688Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489857898238674:2374] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2024 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 995 FinishTimeMs: 1771489590561 InputRows: 5 InputBytes: 52 EgressBytes: 80 EgressRows: 5 ComputeCpuTimeUs: 76 BuildCpuTimeUs: 919 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489590560 CreateTimeMs: 1771489590559 UpdateTimeMs: 1771489590561 } MaxMemoryUsage: 3145728 } trace_id# 2026-02-19T08:26:30.561703Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489857898238674:2374] 2026-02-19T08:26:30.561790Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:7608489857898238643:2325] trace_id# 2026-02-19T08:26:30.561829Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.010704s ReadRows: 0 ReadBytes: 0 RequestUnits# 7 ForceFlag# true trace_id# 2026-02-19T08:26:30.566293Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. 
Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:30.566334Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489857898238664:2325] TxId: 281474976710663. Ctx: { TraceId: 01khtg3yj039t85krkh86acyya, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVkOWE0MzEtNmQ5YjkxMWItMTc4Y2VkYTQtNzliMTA5Y2U=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> KqpBatchUpdate::TableWithSyncIndex-EnableIndexStreamWrite [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountLabels >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestQuoterResourceModification >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestParentUpdateDoesNotClampLeafFreeResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestParentUpdateDoesNotNotifyUnchangedLeaf [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2026-02-19T08:26:29.293519Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:29.293686Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:29.308672Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:29.308767Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:29.322973Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:29.706309Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:29.706417Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:29.726781Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:29.726897Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:29.764799Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:29.765329Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=3423092082784263049, session=0, seqNo=0) 2026-02-19T08:26:29.765536Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:29.777466Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=3423092082784263049, session=1) 2026-02-19T08:26:29.777797Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:26:29.777929Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:26:29.778008Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:29.789979Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=111) 2026-02-19T08:26:29.790900Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:149:2171], cookie=4441691969865396366, name="Sem1", limit=42) 2026-02-19T08:26:29.791049Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-02-19T08:26:29.803097Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:149:2171], cookie=4441691969865396366) 2026-02-19T08:26:29.803646Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:154:2176], cookie=5650920183329081650, name="Sem1", limit=42) 2026-02-19T08:26:29.815832Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:154:2176], cookie=5650920183329081650) 2026-02-19T08:26:29.816355Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:159:2181], cookie=2889726971500977125, name="Sem1", limit=51) 2026-02-19T08:26:29.828083Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:159:2181], cookie=2889726971500977125) 2026-02-19T08:26:29.828475Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:164:2186], cookie=14674584423060914429, name="Lock1", limit=42) 2026-02-19T08:26:29.840528Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:164:2186], cookie=14674584423060914429) 2026-02-19T08:26:29.841211Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:169:2191], cookie=12818520078021313036, name="Lock1", limit=18446744073709551615) 2026-02-19T08:26:29.852947Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:169:2191], cookie=12818520078021313036) 2026-02-19T08:26:29.853426Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:174:2196], cookie=6985426416695854410, name="Sem1") 2026-02-19T08:26:29.853499Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:174:2196], cookie=6985426416695854410) 2026-02-19T08:26:29.853966Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:177:2199], cookie=10238281415013647081, name="Sem2") 2026-02-19T08:26:29.854017Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:177:2199], cookie=10238281415013647081) 2026-02-19T08:26:29.865365Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: 
OnActivateExecutor: 72057594037927937 2026-02-19T08:26:29.865456Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:29.865807Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:29.865995Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:29.912647Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:29.912784Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:29.913149Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:216:2229], cookie=18045817424141155537, name="Sem1") 2026-02-19T08:26:29.913226Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:216:2229], cookie=18045817424141155537) 2026-02-19T08:26:29.913825Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:223:2235], cookie=9779941649424700386, name="Sem2") 2026-02-19T08:26:29.913892Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:223:2235], cookie=9779941649424700386) 2026-02-19T08:26:30.327426Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:30.327539Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:30.347374Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:30.347496Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:30.372273Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:30.372828Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=12391968031422887181, session=0, seqNo=0) 2026-02-19T08:26:30.372983Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:30.384973Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=12391968031422887181, session=1) 2026-02-19T08:26:30.385232Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:26:30.385373Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:26:30.385465Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:30.397402Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:137:2161], cookie=111) 2026-02-19T08:26:30.398053Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:148:2170], cookie=1773491366222509856, name="Lock1", force=0) 2026-02-19T08:26:30.410238Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] 
TTxSemaphoreDelete::Complete (sender=[3:148:2170], cookie=1773491366222509856) 2026-02-19T08:26:30.410773Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:153:2175], cookie=10563486494620417256, name="Sem1", force=0) 2026-02-19T08:26:30.422955Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:153:2175], cookie=10563486494620417256) 2026-02-19T08:26:30.423509Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:158:2180], cookie=910258650663266591, name="Sem1", limit=42) 2026-02-19T08:26:30.423654Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-02-19T08:26:30.435878Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:158:2180], cookie=910258650663266591) 2026-02-19T08:26:30.436404Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:163:2185], cookie=9767792576690995048, name="Sem1", force=0) 2026-02-19T08:26:30.436505Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2026-02-19T08:26:30.448648Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:163:2185], cookie=9767792576690995048) 2026-02-19T08:26:30.449249Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:168:2190], cookie=11106629204960766932, name="Sem1", force=0) 2026-02-19T08:26:30.461576Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:168:2190], cookie=11106629204960766932) 2026-02-19T08:26:30.761110Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:30.761203Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:30.775541Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:30.775698Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:30.799961Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:30.800453Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=17539524443761745997, session=0, seqNo=0) 2026-02-19T08:26:30.800573Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:30.812550Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=17539524443761745997, session=1) 2026-02-19T08:26:30.812827Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], co ... 
6Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:32.229591Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:32.240338Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=129, session=1, semaphore="Sem2" count=2) 2026-02-19T08:26:32.252594Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=129) 2026-02-19T08:26:32.253020Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=130, name="Sem2") 2026-02-19T08:26:32.253121Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=130) 2026-02-19T08:26:32.253430Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=131, session=1, semaphore="Sem2" count=1) 2026-02-19T08:26:32.265810Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=131) 2026-02-19T08:26:32.266249Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=132, name="Sem2") 2026-02-19T08:26:32.266354Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=132) 2026-02-19T08:26:32.266651Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=133, name="Sem2") 2026-02-19T08:26:32.266745Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=133) 2026-02-19T08:26:32.628747Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:32.628874Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:32.650230Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:32.651386Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:32.686238Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:32.693173Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=18425703355375869263, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2026-02-19T08:26:32.693460Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 2026-02-19T08:26:32.706040Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=18425703355375869263) 2026-02-19T08:26:32.706706Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=12061624625499142859, path="/Root1/Res", config={ }) 2026-02-19T08:26:32.706966Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 
2026-02-19T08:26:32.721305Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=12061624625499142859) 2026-02-19T08:26:32.722065Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2173], cookie=2252557667268099552, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2026-02-19T08:26:32.722273Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2026-02-19T08:26:32.734620Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2173], cookie=2252557667268099552) 2026-02-19T08:26:32.735257Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2178], cookie=3523498086281485039, path="/Root2/Res", config={ }) 2026-02-19T08:26:32.735492Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2026-02-19T08:26:32.747631Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2178], cookie=3523498086281485039) 2026-02-19T08:26:32.748179Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:161:2183], cookie=4843115904350865188, path="/Root2/Res/Subres", config={ }) 2026-02-19T08:26:32.748379Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2026-02-19T08:26:32.760576Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:161:2183], cookie=4843115904350865188) 2026-02-19T08:26:32.766222Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:166:2188]. Cookie: 415754983702141474. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:32.766324Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:166:2188], cookie=415754983702141474) 2026-02-19T08:26:32.808728Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-02-19T08:26:32.861172Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-02-19T08:26:32.892486Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-02-19T08:26:32.893229Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:174:2192]. Cookie: 8778585702680649994. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2026-02-19T08:26:32.894092Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:177:2195]. Cookie: 9554425078778808500. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:32.894151Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:177:2195], cookie=9554425078778808500) 2026-02-19T08:26:32.936039Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-02-19T08:26:32.977903Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-02-19T08:26:32.978648Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2199]. Cookie: 14131007900841199774. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2026-02-19T08:26:32.979372Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:166:2188]. Cookie: 4477764427223488762. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:32.979416Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:166:2188], cookie=4477764427223488762) 2026-02-19T08:26:32.979946Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:177:2195]. Cookie: 8271752092487910772. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:32.979985Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:177:2195], cookie=8271752092487910772) 2026-02-19T08:26:33.011127Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2026-02-19T08:26:33.011243Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2026-02-19T08:26:33.011908Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:190:2206]. Cookie: 2032110184530907134. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestParentUpdateDoesNotNotifyUnchangedLeaf [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithSyncIndex-EnableIndexStreamWrite [GOOD] Test command err: 2026-02-19T08:26:25.892997Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489833379242958:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:25.893056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:26.100222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:26.118336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:26.118412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:26.158513Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:26.159629Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489833379242927:2081] 1771489585891450 != 1771489585891453 2026-02-19T08:26:26.169209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:26.233912Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:26.233943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:26.233955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:26.234070Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:26.268397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:28.718271Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/00069f/r3tmp/spilling-tmp-runner/node_1_3345f511-da50d2dc-57890589-57597620, actor: [1:7608489846264145665:2309] 2026-02-19T08:26:28.718823Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/00069f/r3tmp/spilling-tmp-runner 2026-02-19T08:26:29.693543Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 2 2026-02-19T08:26:29.693622Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-02-19T08:26:29.693652Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:26:29.693682Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 '('"v1" (Just (String '"123")))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct '('"k" (Int32 '1)) $1 $2 $3)) (let $5 (AsStruct '('"k" (Int32 '2)) $1 $2 $3)) (let $6 (AsStruct '('"k" (Int32 '"3")) $1 $2 $3)) (let $7 (AsStruct '('"k" (Int32 '"4")) $1 $2 $3)) (let $8 (AsStruct '('"k" (Int32 '"5")) $1 $2 $3)) (let $9 (AsList $4 $5 $6 $7 $8)) (let $10 (PartitionByKey $9 (lambda '($16) (Member $16 '"k")) (Void) (Void) (lambda '($17) (FlatMap $17 (lambda '($18) (Last (ForwardList (Nth $18 '1)))))))) (let $11 '('"k" (DataType 'Int32))) (let $12 (OptionalType (DataType 'String))) (let $13 (VariantType (TupleType (ListType (StructType $11 '('"v1" $12) '('"v2" $12) '('"v3" $12))) (ListType (StructType $11))))) (let $14 (Variant $10 '0 $13)) (let $15 (Variant (Map $10 (lambda '($19) (AsStruct '('"k" (Member $19 '"k"))))) '1 $13)) (return (Iterator (AsList $14 $15))) )))) ) trace_id# 2026-02-19T08:26:29.693742Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:26:29.693749Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 2 from task: 1 with index: 1 2026-02-19T08:26:29.697823Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Execution is complete results_size# 2 trace_id# 2026-02-19T08:26:29.719927Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 2 2026-02-19T08:26:29.719988Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3xyv5dkx7whswyc4q4sq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Begin literal execution operation_timeout# 7199.491867s cancel_after# trace_id# 2026-02-19T08:26:29.720005Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3xyv5dkx7whswyc4q4sq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:26:29.720028Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3xyv5dkx7whswyc4q4sq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 '('"v1" (Just (String '"123")))) (let $2 '('"v2" (Just (String '"456")))) (let $3 '('"v3" (Just (String '"789")))) (let $4 (AsStruct '('"k" (Int32 '1)) $1 $2 $3)) (let $5 (AsStruct '('"k" (Int32 '2)) $1 $2 $3)) (let $6 (AsStruct '('"k" (Int32 '"3")) $1 $2 $3)) (let $7 (AsStruct '('"k" (Int32 '"4")) $1 $2 $3)) (let $8 (AsStruct '('"k" (Int32 '"5")) $1 $2 $3)) (let $9 (AsList $4 $5 $6 $7 $8)) (let $10 (PartitionByKey $9 (lambda '($16) (Member $16 '"k")) (Void) (Void) (lambda '($17) (FlatMap $17 (lambda '($18) (Last (ForwardList (Nth $18 '1)))))))) (let $11 '('"k" (DataType 'Int32))) (let $12 (OptionalType (DataType 'String))) (let $13 (VariantType (TupleType (ListType (StructType $11 '('"v1" $12) '('"v2" $12) '('"v3" $12))) (ListType (StructType $11))))) (let $14 (Variant $10 '0 $13)) (let $15 (Variant (Map $10 (lambda '($19) (AsStruct '('"k" (Member $19 '"k"))))) '1 $13)) (return (Iterator (AsList $14 $15))) )))) ) trace_id# 2026-02-19T08:26:29.720050Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:26:29.720056Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 2 from task: 1 with index: 1 2026-02-19T08:26:29.720649Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3xyv5dkx7whswyc4q4sq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Execution is complete results_size# 2 trace_id# 2026-02-19T08:26:29.720926Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.720946Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2026-02-19T08:26:29.720955Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 1, OutputsCount: 1 2026-02-19T08:26:29.720981Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01khtg3xyv5dkx7whswyc4q4sq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:29.721429Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489850559113296:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg3xyv5dkx7whswyc4q4sq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 3 trace_id# 2026-02-19T08:26:29.721484Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489850559113296:2325] TxId: 281474976710661. Ctx: { TraceId: 01khtg3xyv5dkx7whswyc4q4sq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:29.721626Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710661. Resolved key sets: 0 2026-02-19T08:26:29.721681Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_1 (ListType (StructType '('"k" (DataType 'Int32))))) (return (lambda '() (Iterator %kqp%tx_result_binding_0_1))) ) 2026-02-19T08:26:29.721696Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:26:29.721737Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,1] AST: ( (return (lambda '($1) (block '( (let $2 (lambda '($3) (Member $3 '"k") (Member $3 '"v1") (Member $3 '"v2"))) (return (FromFlow (ExpandMap (ToFlow $1) $2))) )))) ) 2026-02-19T08:26:29.721768Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,1] create compute task: 2 2026-02-19T08:26:29.721824Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:824: TxId: 281474976710661. Stage [0,1] create channelId: 1 from task: 1 to task: 2 of type StreamLookup/Map without spilling 2026-02-19T08:26:29.721832Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,2] AST: ( (return (lambda '($1) (FromFlow (Map (SqueezeToList (NarrowMap (ToFlow $1) (lambda '($2 $3 $4) (AsStruct '('"k" $2) '('"v1" $3) '('"v2" $4)))) (Uint64 '0)) (lambda '($5) (ToDict $5 (lambda '($6) (AsStruct '('"k" (Member $6 '"k")))) (lambda '($7) (AsStruct '('"v1" (Member $7 '"v1")) '('"v2" (Member $7 '"v2")))) '('"One" '"Hashed"))))))) ) 2026-02-19T08:26:29.721852Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,2] create compute task: 3 2026-02-19T08:26:29.721891Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:842: Chose [0:1] outputIdx: 0 to propagate through inputs stages of the stage [0:2]: [ NULL ] 2026-02-19T08:26:29.721918Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:824: TxId: 281474976710661. Stage [0,2] create channelId: 2 from task: 2 to task: 3 of type UnionAll/Map without spilling 2026-02-19T08:26:29.721932Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 3 from task: 3 with index: 0 2026-02-19T08:26:29.722016Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710661. Ctx: { TraceId: 01khtg3xyv5dkx7whswyc4q4sq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Total ... XECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710674. 
Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489863444015563:2466], channels: 2 2026-02-19T08:26:32.485452Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489863444015562:2465], channels: 1 2026-02-19T08:26:32.485495Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:32.485509Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:32.485523Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 2 at actor [1:7608489863444015563:2466] 2026-02-19T08:26:32.485538Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:889: TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Task: 2, input channelId: 1, src task: 1, at actor [1:7608489863444015562:2465] 2026-02-19T08:26:32.485561Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489863444015563:2466], channels: 2 2026-02-19T08:26:32.485576Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:7608489863444015562:2465], channels: 1 2026-02-19T08:26:32.485605Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [1:7608489863444015563:2466], CA [1:7608489863444015562:2465], trace_id# 2026-02-19T08:26:32.485652Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489863444015563:2466], CA [1:7608489863444015562:2465], trace_id# 2026-02-19T08:26:32.485672Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:32.486984Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489863444015562:2465] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:32.487027Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489863444015563:2466], CA [1:7608489863444015562:2465], trace_id# 2026-02-19T08:26:32.487051Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489863444015563:2466], CA [1:7608489863444015562:2465], trace_id# 2026-02-19T08:26:32.487100Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489863444015563:2466] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:32.487121Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489863444015563:2466], CA [1:7608489863444015562:2465], trace_id# 2026-02-19T08:26:32.487140Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. 
Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [1:7608489863444015563:2466], CA [1:7608489863444015562:2465], trace_id# 2026-02-19T08:26:32.488420Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489863444015562:2465] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1666 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 844 FinishTimeMs: 1771489592488 OutputBytes: 1 Tables { TablePath: "/Root/global_sync_idx/idx/indexImplTable" ReadRows: 5 ReadBytes: 40 AffectedPartitions: 1 } IngressRows: 5 ComputeCpuTimeUs: 100 BuildCpuTimeUs: 744 WaitOutputTimeUs: 40 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489592487 CreateTimeMs: 1771489592485 UpdateTimeMs: 1771489592488 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:32.488469Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489863444015562:2465] 2026-02-19T08:26:32.488496Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7608489863444015563:2466], trace_id# 2026-02-19T08:26:32.488518Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:7608489863444015563:2466], trace_id# 2026-02-19T08:26:32.488626Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7608489863444015563:2466] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 858 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 157 FinishTimeMs: 1771489592488 InputBytes: 1 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 80 BuildCpuTimeUs: 77 HostName: "ghrun-fkrokuvqke" NodeId: 1 StartTimeMs: 1771489592488 CreateTimeMs: 1771489592486 UpdateTimeMs: 1771489592488 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:32.488644Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. 
Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7608489863444015563:2466] 2026-02-19T08:26:32.488730Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:32.488748Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:32.488770Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [1:7608489863444015558:2325] TxId: 281474976710674. Ctx: { TraceId: 01khtg40rebm2e963apap7cygf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmU3MDYwNjktNzI2NWY0ZjktOTRhOTMwZTMtNmVkOWU2YTA=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002524s ReadRows: 5 ReadBytes: 40 RequestUnits# 5 ForceFlag# true trace_id# >> TKesusTest::TestQuoterAccountResourcesOnDemand >> KqpBatchPEA::AbortState_DoubleAbort [GOOD] >> KqpBatchPEA::AbortState_AbortFromExecuterActor >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2026-02-19T08:26:32.210876Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:32.211056Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:32.229442Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:32.229559Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:32.244390Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:32.668110Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:32.668220Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:32.685626Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:32.685789Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:32.722687Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.100070Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.100203Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] 
TTxInitSchema::Execute 2026-02-19T08:26:33.117977Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.118095Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:33.142633Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.553624Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.553767Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:33.572103Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.572293Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:33.596657Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.601549Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=16056332391576497519, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-02-19T08:26:33.601833Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:33.613994Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=16056332391576497519) 2026-02-19T08:26:33.614601Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:146:2168], cookie=14518198166538244018, path="/Root/Q", config={ MaxUnitsPerSecond: 100 }) 2026-02-19T08:26:33.614863Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Q" 2026-02-19T08:26:33.627674Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:146:2168], cookie=14518198166538244018) 2026-02-19T08:26:33.628312Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:151:2173], cookie=5697935202791330223, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.628427Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:151:2173], cookie=5697935202791330223) 2026-02-19T08:26:33.642745Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.642888Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:33.643378Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.643984Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:33.681367Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.681809Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:190:2203], cookie=5608437586347233999, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.681900Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:190:2203], cookie=5608437586347233999) 2026-02-19T08:26:33.682509Z node 4 
:KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[4:196:2208], cookie=10935314624384062736, id=1, path="", config={ MaxUnitsPerSecond: 5 }) 2026-02-19T08:26:33.682644Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2026-02-19T08:26:33.705292Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[4:196:2208], cookie=10935314624384062736) 2026-02-19T08:26:33.706045Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[4:201:2213], cookie=12379862806522906395, id=0, path="/Root/Q", config={ MaxUnitsPerSecond: 5 }) 2026-02-19T08:26:33.706236Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 2 "Root/Q" 2026-02-19T08:26:33.718745Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[4:201:2213], cookie=12379862806522906395) 2026-02-19T08:26:33.719394Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:206:2218], cookie=12478648648636284559, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.719493Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:206:2218], cookie=12478648648636284559) 2026-02-19T08:26:33.733227Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.733336Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:33.733907Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.734805Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:33.772616Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.773018Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:245:2248], cookie=10689223876872643151, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.773129Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:245:2248], cookie=10689223876872643151) 2026-02-19T08:26:33.774317Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:254:2256], cookie=4405915051795576547, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.774399Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:254:2256], cookie=4405915051795576547) 2026-02-19T08:26:33.787566Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.787675Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:33.788206Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.788891Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:33.826679Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: 
[72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.827162Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:293:2286], cookie=8227161295613004587, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.827253Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:293:2286], cookie=8227161295613004587) 2026-02-19T08:26:33.828467Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:302:2294], cookie=9849874679677976132, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.828541Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:302:2294], cookie=9849874679677976132) 2026-02-19T08:26:33.841642Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.841839Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:33.842360Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.842553Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:33.900996Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.901443Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:341:2324], cookie=3355372373940212830, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.901529Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:341:2324], cookie=3355372373940212830) 2026-02-19T08:26:33.902884Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:351:2332], cookie=2247085881277905607, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.902961Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:351:2332], cookie=2247085881277905607) 2026-02-19T08:26:33.916066Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.916195Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:33.916700Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.917378Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:33.964984Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.965284Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:390:2362], cookie=7549244371840636366, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.965354Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:390:2362], cookie=7549244371840636366) 2026-02-19T08:26:33.966069Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[4:396:2367], cookie=14420625719098669160, id=0, path="/Root/P", config={ 
MaxUnitsPerSecond: 42 }) 2026-02-19T08:26:33.966140Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[4:396:2367], cookie=14420625719098669160) 2026-02-19T08:26:33.966651Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:399:2370], cookie=14619821170577787047, ids=[], paths=[], recursive=1) 2026-02-19T08:26:33.966704Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:399:2370], cookie=14619821170577787047) 2026-02-19T08:26:33.977126Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.977234Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:33.977647Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.978280Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:34.035360Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:34.035893Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:438:2400], cookie=18333682564222502863, ids=[], paths=[], recursive=1) 2026-02-19T08:26:34.036015Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:438:2400], cookie=18333682564222502863) 2026-02-19T08:26:34.037005Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[4:444:2405], cookie=1299993476704565749, id=42, path="", config={ MaxUnitsPerSecond: 42 }) 2026-02-19T08:26:34.037099Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[4:444:2405], cookie=1299993476704565749) 2026-02-19T08:26:34.037875Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:447:2408], cookie=14307353039127756943, ids=[], paths=[], recursive=1) 2026-02-19T08:26:34.037941Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:447:2408], cookie=14307353039127756943) 2026-02-19T08:26:34.049978Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:34.050077Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:34.050483Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:34.050938Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:34.099041Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:34.099439Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:486:2438], cookie=2448366737175469021, ids=[], paths=[], recursive=1) 2026-02-19T08:26:34.099521Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:486:2438], cookie=2448366737175469021) 2026-02-19T08:26:34.610601Z 
node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:34.610711Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:34.630811Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:34.631383Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:34.666376Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:34.666837Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=14929327422251946725, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-02-19T08:26:34.667046Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Q1" 2026-02-19T08:26:34.679302Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=14929327422251946725) 2026-02-19T08:26:34.680009Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=769767958226149322, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2026-02-19T08:26:34.680209Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Q2" 2026-02-19T08:26:34.692086Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=769767958226149322) 2026-02-19T08:26:34.695365Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 6249492313018953261. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:34.695452Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=6249492313018953261) 2026-02-19T08:26:34.696281Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 4317985862552954910. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 } 2026-02-19T08:26:34.696345Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=4317985862552954910) >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpLimits::QSReplySize+useSink >> TKesusTest::TestUnregisterProxy |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2026-02-19T08:26:30.072292Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:30.072431Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:30.090794Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:30.090909Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:30.105949Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:30.106372Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:135:2159], cookie=15569238932484415190, path="/foo/bar/baz") 2026-02-19T08:26:30.129800Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:135:2159], cookie=15569238932484415190, 
status=SUCCESS) 2026-02-19T08:26:30.130386Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:144:2166], cookie=7029138314014843947) 2026-02-19T08:26:30.142836Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:144:2166], cookie=7029138314014843947) 2026-02-19T08:26:30.143468Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:149:2171], cookie=382914652030604887, path="/foo/bar/baz") 2026-02-19T08:26:30.155605Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:149:2171], cookie=382914652030604887, status=SUCCESS) 2026-02-19T08:26:30.156097Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:154:2176], cookie=4611300199373904411) 2026-02-19T08:26:30.168084Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:154:2176], cookie=4611300199373904411) 2026-02-19T08:26:30.178742Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:30.178837Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:30.179195Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:30.179649Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:30.215919Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:30.216322Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:195:2208], cookie=6528567916079138731) 2026-02-19T08:26:30.238937Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:195:2208], cookie=6528567916079138731) 2026-02-19T08:26:30.239667Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:203:2215], cookie=6505362978404708508, path="/foo/bar/baz") 2026-02-19T08:26:30.252427Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:203:2215], cookie=6505362978404708508, status=SUCCESS) 2026-02-19T08:26:30.253082Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:208:2220], cookie=14214730975852332062, path="/foo/bar/baz") 2026-02-19T08:26:30.253183Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:208:2220], cookie=14214730975852332062, status=PRECONDITION_FAILED) 2026-02-19T08:26:30.668882Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:30.668990Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:30.686636Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:30.686757Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:30.721835Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:30.722216Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:134:2159], cookie=17740920724272155073, name="Lock1") 
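Note on the quoter records above: resources are created with settings such as MaxUnitsPerSecond, and the EffectiveProps echoed back to subscribers carry MaxBurstSizeCoefficient and Weight. As a rough mental model only (this is not the Kesus tablet's actual HierarchicalDRR scheduler, and all names below are illustrative), a plain token bucket with rate = MaxUnitsPerSecond and capacity = rate * MaxBurstSizeCoefficient behaves like this self-contained sketch:

// Minimal token-bucket sketch: "rate" stands in for MaxUnitsPerSecond and
// "burstCoefficient" for MaxBurstSizeCoefficient from the log records above.
// Illustration only; not the Kesus HierarchicalDRR implementation.
#include <algorithm>
#include <chrono>
#include <iostream>

class TokenBucket {
public:
    TokenBucket(double rate, double burstCoefficient)
        : Rate(rate)
        , Capacity(rate * burstCoefficient)
        , Tokens(Capacity)
        , Last(std::chrono::steady_clock::now())
    {}

    // Try to consume `amount` units; returns true if the request fits the current budget.
    bool TryConsume(double amount) {
        Refill();
        if (Tokens < amount) {
            return false;
        }
        Tokens -= amount;
        return true;
    }

private:
    void Refill() {
        auto now = std::chrono::steady_clock::now();
        std::chrono::duration<double> elapsed = now - Last;
        Last = now;
        Tokens = std::min(Capacity, Tokens + elapsed.count() * Rate);
    }

    double Rate;      // units per second
    double Capacity;  // maximum burst, in units
    double Tokens;    // currently available units
    std::chrono::steady_clock::time_point Last;
};

int main() {
    TokenBucket bucket(/*rate=*/100.0, /*burstCoefficient=*/1.0); // cf. MaxUnitsPerSecond: 100
    std::cout << (bucket.TryConsume(10.0) ? "allowed" : "throttled") << std::endl;
    return 0;
}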
2026-02-19T08:26:30.722304Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:134:2159], cookie=17740920724272155073) 2026-02-19T08:26:31.120013Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:31.120124Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:31.138841Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:31.138965Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:31.163287Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:31.172407Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:137:2161], cookie=14454944916343346067, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-02-19T08:26:31.172764Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:31.184532Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:137:2161], cookie=14454944916343346067) 2026-02-19T08:26:31.185589Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:147:2169]. Cookie: 10050160870137556315. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:31.185638Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:147:2169], cookie=10050160870137556315) 2026-02-19T08:26:31.186160Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:147:2169]. Cookie: 2771602248672018887. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:31.186203Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:147:2169], cookie=2771602248672018887) 2026-02-19T08:26:31.186563Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:147:2169]. Cookie: 17330399282977720198. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-02-19T08:26:31.186601Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:147:2169], cookie=17330399282977720198) 2026-02-19T08:26:31.186877Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:147:2169]. Cookie: 8291308264334634267. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-02-19T08:26:31.186909Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:147:2169], cookie=8291308264334634267) 2026-02-19T08:26:33.396024Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:33.396145Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:33.416912Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:33.417134Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:33.441965Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:33.442411Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=6578604510580654294, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2026-02-19T08:26:33.442747Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:33.454780Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=6578604510580654294) 2026-02-19T08:26:33.455789Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 3096281078515629993. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:33.455851Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=3096281078515629993) 2026-02-19T08:26:33.456316Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 14828683507460206548. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2026-02-19T08:26:33.456367Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=14828683507460206548) 2026-02-19T08:26:35.662793Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:35.662908Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:35.682060Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:35.682717Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:35.717687Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:35.718233Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=16824965226738312295, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-02-19T08:26:35.718441Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:35.730511Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=16824965226738312295) 2026-02-19T08:26:35.731122Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=9007706108044558370, path="/Root/Res", config={ }) 2026-02-19T08:26:35.731365Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-02-19T08:26:35.750732Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=9007706108044558370) 2026-02-19T08:26:35.751547Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 11113975322683237263. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:35.751605Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=11113975322683237263) 2026-02-19T08:26:35.752151Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:155:2177], cookie=2621366464330256475, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2026-02-19T08:26:35.752325Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2026-02-19T08:26:35.752499Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2026-02-19T08:26:35.764364Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:155:2177], cookie=2621366464330256475) 2026-02-19T08:26:35.764872Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:151:2173]. Cookie: 2167612935949551531. Data: { } 2026-02-19T08:26:35.764915Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:151:2173], cookie=2167612935949551531) >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> TxUsage::WriteToTopic_Demo_47_Query [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> KqpBatchUpdate::ColumnTable [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table [GOOD] >> KqpBatchDelete::Returning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:58.290255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:58.367055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:58.373489Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:58.373781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:58.373912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:58.693555Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:58.781574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:58.781675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:58.838907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:58.929147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:59.535414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:59.535462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:59.535485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:59.537586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:59.537977Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:59.600193Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:00.098041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:26:02.433560Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:02.438609Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:26:02.441454Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:02.464311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:26:02.464414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:02.502199Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:26:02.503594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:02.675247Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:02.675366Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:02.676432Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.677219Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.677882Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.678262Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.678441Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.678522Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.678650Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.678756Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.678815Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:02.692896Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:02.841548Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:02.878370Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:26:02.878465Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:26:02.907152Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:26:02.907304Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:26:02.907471Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:26:02.907528Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:02.907568Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:26:02.907622Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:02.907666Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:02.907702Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:02.908065Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:02.911836Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:02.911934Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2137:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:02.932462Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2182:2627] 2026-02-19T08:26:02.933285Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2182:2627], schemeshard id = 72075186224037897 2026-02-19T08:26:02.999279Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2267:2669] 2026-02-19T08:26:03.001331Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:26:03.012925Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Describe result: PathErrorUnknown 2026-02-19T08:26:03.013002Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Creating table 2026-02-19T08:26:03.013090Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:26:03.019034Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2325:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:03.023142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:03.030588Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:26:03.030707Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:26:03.043555Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:26:03.233261Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:26:03.345380Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:26:03.547703Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:26:03.685006Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:03.685090Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Column diff is empty, finishing 2026-02-19T08:26:04.376656Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... sion/3?node_id=2&id=Nzc2YTMyYi1jNDJmNmU3ZS1kNzQ5OTg5OC1hY2RlODJhNg==, TxId: 2026-02-19T08:26:33.816430Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5440:4690], ActorId: [2:5441:4691], Got response [2:5442:4692] SUCCESS 2026-02-19T08:26:33.816859Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:33.831392Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-02-19T08:26:33.831447Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-02-19T08:26:33.959510Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:26:33.959631Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:26:34.027756Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:5453:4703], schemeshard count = 1 2026-02-19T08:26:35.071443Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:711: [72075186224037894] ScheduleNextBackgroundTraversal 2026-02-19T08:26:35.071526Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:842: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-02-19T08:26:35.071576Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:754: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:35.076788Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:35.107224Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:35.107907Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:35.107993Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:35.109348Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-02-19T08:26:35.146871Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:35.147282Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-02-19T08:26:35.148362Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5534:4747], server id = [2:5538:4751], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:35.148787Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5534:4747], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:35.150207Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5535:4748], server id = [2:5539:4752], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:35.150292Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5535:4748], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:35.150678Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5536:4749], server id = [2:5540:4753], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:35.150743Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5536:4749], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:35.151901Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5537:4750], server id = [2:5541:4754], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:35.151964Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5537:4750], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:35.159192Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-02-19T08:26:35.159611Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:35.160417Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5534:4747], server id = [2:5538:4751], tablet id = 72075186224037899 2026-02-19T08:26:35.160481Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:35.161094Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5535:4748], server id = [2:5539:4752], tablet id = 72075186224037900 2026-02-19T08:26:35.161135Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:35.162046Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:35.162373Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5536:4749], server id = [2:5540:4753], tablet id = 72075186224037901 2026-02-19T08:26:35.162408Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:35.162920Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:35.162976Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:35.163150Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
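The aggregation rounds above follow a fan-out/merge/retry pattern: round 2 sends TEvStatisticsRequest to four column-shard tablets, and round 3 re-requests tablet 72075186224037900 before the traversal completes. The sketch below shows that pattern in isolation; it is a conceptual illustration under assumed names (AggregateWithRetries, QueryFn), not YDB's actual actors or events.

// Conceptual "fan out, merge, retry the remainder" loop, mirroring the rounds above.
#include <cstdint>
#include <functional>
#include <iostream>
#include <optional>
#include <set>

using TabletId = uint64_t;

// Returns the per-tablet contribution, or nullopt if the tablet did not answer this round.
using QueryFn = std::function<std::optional<uint64_t>(TabletId)>;

uint64_t AggregateWithRetries(std::set<TabletId> pending, const QueryFn& queryTablet,
                              int maxRounds = 3) {
    uint64_t total = 0;
    for (int round = 1; round <= maxRounds && !pending.empty(); ++round) {
        std::set<TabletId> stillPending;
        for (TabletId tablet : pending) {
            if (auto value = queryTablet(tablet)) {
                total += *value;             // merge this tablet's response
            } else {
                stillPending.insert(tablet); // ask again in the next round
            }
        }
        pending.swap(stillPending);
    }
    return total;
}

int main() {
    std::set<TabletId> tablets = {72075186224037899ULL, 72075186224037900ULL,
                                  72075186224037901ULL, 72075186224037902ULL};
    int attempt = 0;
    // Simulate one tablet failing in the first round and succeeding in the second.
    auto query = [&attempt](TabletId id) -> std::optional<uint64_t> {
        ++attempt;
        if (id == 72075186224037900ULL && attempt <= 4) {
            return std::nullopt;
        }
        return 1; // each tablet contributes one unit in this toy example
    };
    std::cout << AggregateWithRetries(tablets, query) << std::endl; // prints 4
    return 0;
}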
2026-02-19T08:26:35.163262Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:35.163593Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5537:4750], server id = [2:5541:4754], tablet id = 72075186224037902 2026-02-19T08:26:35.163627Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:35.164330Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:35.190448Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:35.190721Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-02-19T08:26:35.191405Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5556:4765], server id = [2:5557:4766], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:35.191502Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5556:4765], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:35.192766Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:35.192830Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:35.192945Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:35.193088Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:35.193478Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5559:4768], ActorId: [2:5560:4769], Starting query actor #1 [2:5561:4770] 2026-02-19T08:26:35.193545Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5560:4769], ActorId: [2:5561:4770], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:35.195834Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5556:4765], server id = [2:5557:4766], tablet id = 72075186224037900 2026-02-19T08:26:35.195872Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:35.196579Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5560:4769], ActorId: [2:5561:4770], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YmI2NjgwZTAtNmQ2NGRlYmMtOGY3MzdjYjctODM1MjQ2NmM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:35.303791Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5570:4779]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:35.304117Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:35.304186Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5570:4779], StatRequests.size() = 1 2026-02-19T08:26:35.444320Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5560:4769], ActorId: [2:5561:4770], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmI2NjgwZTAtNmQ2NGRlYmMtOGY3MzdjYjctODM1MjQ2NmM=, TxId: 2026-02-19T08:26:35.444409Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5560:4769], ActorId: [2:5561:4770], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmI2NjgwZTAtNmQ2NGRlYmMtOGY3MzdjYjctODM1MjQ2NmM=, TxId: 2026-02-19T08:26:35.444791Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5559:4768], ActorId: [2:5560:4769], Got response [2:5561:4770] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:26:35.445232Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5583:4785]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:35.445535Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:35.445599Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:35.445823Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:35.446807Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:35.446854Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:35.446914Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:35.450542Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> TxUsage::Write_And_Read_Small_Messages_1 |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:26:27.152697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:27.245804Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:27.259826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:26:27.259882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:26:27.260314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:26:27.527973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:27.528105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:27.609684Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489584402239 != 1771489584402243 2026-02-19T08:26:27.618405Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:27.663926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:27.795886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:28.118938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:28.118996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:28.119053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:28.119409Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:28.131645Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:29.433579Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.433674Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:29.433929Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:26:29.434012Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:29.434256Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715661. Resolved key sets: 1 2026-02-19T08:26:29.434390Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.434544Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value0"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '0)) $1) (AsStruct '('"Key" (Uint32 '1)) $1) (AsStruct '('"Key" (Uint32 '2)) $1) (AsStruct '('"Key" (Uint32 '3)) $1) (AsStruct '('"Key" (Uint32 '4)) $1)))) )))) ) 2026-02-19T08:26:29.434594Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:26:29.434856Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:26:29.435328Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1229:2780] 2026-02-19T08:26:29.435375Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1229:2780], channels: 0 2026-02-19T08:26:29.435486Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:29.435544Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:29.435591Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1229:2780] 2026-02-19T08:26:29.435639Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. 
Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1229:2780], channels: 0 2026-02-19T08:26:29.435716Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1229:2780], trace_id# 2026-02-19T08:26:29.435768Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1229:2780], trace_id# 2026-02-19T08:26:29.435830Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:29.447383Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1229:2780] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:29.447500Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1229:2780], trace_id# 2026-02-19T08:26:29.447565Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1229:2780], trace_id# 2026-02-19T08:26:29.448686Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1229:2780] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1678 Tasks { TaskId: 1 CpuTimeUs: 1013 FinishTimeMs: 1771489589448 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 32 BuildCpuTimeUs: 981 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489589436 UpdateTimeMs: 1771489589448 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:29.448775Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWM1MDExY2QtMWI3ZmE0YjEtZjE4ZGUxYTAtMTNiZTQ3NWQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1229:2780] 2026-02-19T08:26:29.448855Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1226:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3y1eae5hazt8krazxwns, Database: /Root, SessionId: ydb ... cuter tasks construction 2026-02-19T08:26:36.310883Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:7608489880873985503:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# PrepareState ComputeActor# [2:7608489880873985636:2804] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 12719 Tasks { TaskId: 1 CpuTimeUs: 616 FinishTimeMs: 1771489596310 Tables { TablePath: "/Root/TestOlap" WriteRows: 3 WriteBytes: 704 AffectedPartitions: 3 } EgressBytes: 960 EgressRows: 3 ComputeCpuTimeUs: 28 BuildCpuTimeUs: 588 HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489596275 UpdateTimeMs: 1771489596310 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpTaskExtraStats" value: "\022\000" } } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:36.310981Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710661. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7608489880873985636:2804] 2026-02-19T08:26:36.311098Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:254} ActorId: [2:7608489880873985503:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Send Flush to BufferActor buffer_actor_id# [2:7608489880873985502:2326] trace_id# 2026-02-19T08:26:36.311145Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489880873985503:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.012719s ReadRows: 0 ReadBytes: 0 RequestUnits# 8 ForceFlag# true trace_id# 2026-02-19T08:26:36.311366Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489880873985503:2326] TxId: 281474976710661. 
Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:36.311406Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489880873985503:2326] TxId: 281474976710661. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:36.311625Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:36.311911Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [2:7608489880873985670:2326] TxId: 281474976710662. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:36.312063Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710662. Resolved key sets: 0 2026-02-19T08:26:36.312130Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710662. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:26:36.312195Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:7608489880873985670:2326] TxId: 281474976710662. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:36.312223Z node 2 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [2:7608489880873985670:2326] TxId: 281474976710662. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:36.312265Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [2:7608489880873985670:2326] TxId: 281474976710662. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:7608489880873985502:2326] trace_id# 2026-02-19T08:26:36.312307Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489880873985670:2326] TxId: 281474976710662. 
Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:26:36.322462Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489880873985670:2326] TxId: 281474976710662. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:36.322492Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489880873985670:2326] TxId: 281474976710662. Ctx: { TraceId: 01khtg44s56kmhs47ztxerfsfm, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:36.640061Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:36.640118Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-02-19T08:26:36.640157Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:26:36.640204Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Nothing (OptionalType (DataType 'Int32))) $1) '((Just (Int32 '"3")) $1))))))))))) )))) ) trace_id# 2026-02-19T08:26:36.640255Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:26:36.641669Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-02-19T08:26:36.653492Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:36.653572Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg44x9d0gw3fppbze46eya, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Begin literal execution operation_timeout# 7199.676330s cancel_after# trace_id# 2026-02-19T08:26:36.653597Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg44x9d0gw3fppbze46eya, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. 
Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:26:36.653632Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg44x9d0gw3fppbze46eya, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Int32 '0)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Nothing (OptionalType (DataType 'Int32))) $1) '((Just (Int32 '"3")) $1))))))))))) )))) ) trace_id# 2026-02-19T08:26:36.653695Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:531: Create result channelId: 1 from task: 1 with index: 0 2026-02-19T08:26:36.654315Z node 2 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:300} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg44x9d0gw3fppbze46eya, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-02-19T08:26:36.654634Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg44x9d0gw3fppbze46eya, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:36.654795Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:7608489880873985710:2326] TxId: 281474976710663. Ctx: { TraceId: 01khtg44x9d0gw3fppbze46eya, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:7608489880873985709:2326] trace_id# 2026-02-19T08:26:36.654898Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489880873985710:2326] TxId: 281474976710663. Ctx: { TraceId: 01khtg44x9d0gw3fppbze46eya, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:36.654915Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489880873985710:2326] TxId: 281474976710663. Ctx: { TraceId: 01khtg44x9d0gw3fppbze46eya, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTU2YWM0ZDMtYzcxMzljNmEtYzEzYjliYjQtNTg1ZmE4NzU=, PoolId: default, IsStreamingQuery: 0}. 
Terminate, become ZombieState trace_id# |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Returning [GOOD] Test command err: 2026-02-19T08:26:04.362383Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489743315793405:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:04.362724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:04.491851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:04.500201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:04.500306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:04.555084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:04.576466Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:04.577202Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489743315793374:2081] 1771489564361163 != 1771489564361166 2026-02-19T08:26:04.614253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:04.614277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:04.614286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:04.614352Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:04.697493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:06.462258Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006ae/r3tmp/spilling-tmp-runner/node_1_edc21fea-a3f12ddd-ccc6731-71eacdc7, actor: [1:7608489751905729803:2384] 2026-02-19T08:26:06.462431Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006ae/r3tmp/spilling-tmp-runner 2026-02-19T08:26:08.150948Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.150985Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151002Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151016Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 
2026-02-19T08:26:08.151048Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151072Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151092Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151120Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151146Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151167Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151187Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:08.151233Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg388yagmkjh9fzthatkgh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTY4MDk5NGEtNjI5YWIxYi04OTk3YWI2Ni1iYWY2NDNjZA==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:08.151374Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489760495665468:2400] TxId: 281474976715672. Ctx: { TraceId: 01khtg388yagmkjh9fzthatkgh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTY4MDk5NGEtNjI5YWIxYi04OTk3YWI2Ni1iYWY2NDNjZA==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:26:08.151409Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489760495665468:2400] TxId: 281474976715672. Ctx: { TraceId: 01khtg388yagmkjh9fzthatkgh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTY4MDk5NGEtNjI5YWIxYi04OTk3YWI2Ni1iYWY2NDNjZA==, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:08.151688Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715672. Resolved key sets: 11 2026-02-19T08:26:08.151829Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.151865Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.151890Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.151926Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.151987Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.152023Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.152054Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.152089Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.152142Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:08.152173Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Group ... 0 datashard(s): CA [5:7608489883643564391:2532], CA [5:7608489883643564385:2526], CA [5:7608489883643564392:2533], CA [5:7608489883643564386:2527], CA [5:7608489883643564387:2528], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], CA [5:7608489883643564384:2525], trace_id# 2026-02-19T08:26:36.926957Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:7608489883643564385:2526] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 687 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 348 FinishTimeMs: 1771489596924 EgressBytes: 30 EgressRows: 2 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 338 HostName: "ghrun-fkrokuvqke" NodeId: 5 CreateTimeMs: 1771489596919 UpdateTimeMs: 1771489596924 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:36.926975Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [5:7608489883643564385:2526] 2026-02-19T08:26:36.927026Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564386:2527], CA [5:7608489883643564387:2528], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], CA [5:7608489883643564384:2525], trace_id# 2026-02-19T08:26:36.927058Z node 5 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 8 compute actor(s) and 0 datashard(s): CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564386:2527], CA [5:7608489883643564387:2528], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], CA [5:7608489883643564384:2525], trace_id# 2026-02-19T08:26:36.927343Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:7608489883643564384:2525] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1493 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 986 FinishTimeMs: 1771489596924 EgressBytes: 528 EgressRows: 24 ComputeCpuTimeUs: 27 BuildCpuTimeUs: 959 HostName: "ghrun-fkrokuvqke" NodeId: 5 CreateTimeMs: 1771489596918 UpdateTimeMs: 1771489596924 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:36.927365Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [5:7608489883643564384:2525] 2026-02-19T08:26:36.927400Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564386:2527], CA [5:7608489883643564387:2528], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], trace_id# 2026-02-19T08:26:36.927430Z node 5 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 7 compute actor(s) and 0 datashard(s): CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564386:2527], CA [5:7608489883643564387:2528], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], trace_id# 2026-02-19T08:26:36.927510Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:7608489883643564386:2527] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 608 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 270 FinishTimeMs: 1771489596924 EgressBytes: 20 EgressRows: 2 ComputeCpuTimeUs: 11 BuildCpuTimeUs: 259 HostName: "ghrun-fkrokuvqke" NodeId: 5 CreateTimeMs: 1771489596919 UpdateTimeMs: 1771489596924 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:36.927528Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [5:7608489883643564386:2527] 2026-02-19T08:26:36.927562Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564387:2528], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], trace_id# 2026-02-19T08:26:36.927589Z node 5 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 6 compute actor(s) and 0 datashard(s): CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564387:2528], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], trace_id# 2026-02-19T08:26:36.927934Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:7608489883643564387:2528] TaskId# 6 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 713 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 357 FinishTimeMs: 1771489596924 EgressBytes: 78 EgressRows: 3 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 345 HostName: "ghrun-fkrokuvqke" NodeId: 5 CreateTimeMs: 1771489596919 UpdateTimeMs: 1771489596924 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:36.927957Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [5:7608489883643564387:2528] 2026-02-19T08:26:36.927990Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], trace_id# 2026-02-19T08:26:36.928019Z node 5 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564388:2529], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], trace_id# 2026-02-19T08:26:36.928103Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:7608489883643564388:2529] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 904 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 541 FinishTimeMs: 1771489596924 EgressBytes: 495 EgressRows: 9 ComputeCpuTimeUs: 15 BuildCpuTimeUs: 526 HostName: "ghrun-fkrokuvqke" NodeId: 5 CreateTimeMs: 1771489596920 UpdateTimeMs: 1771489596924 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:36.928119Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. 
Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [5:7608489883643564388:2529] 2026-02-19T08:26:36.928147Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], trace_id# 2026-02-19T08:26:36.928170Z node 5 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [5:7608489883643564379:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg43pf0ymk71szhnsc0xhr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzQzOGJhMDUtZTRmMGVhMzAtOTk0ZDY2MzItYmMyNGEyODY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [5:7608489883643564391:2532], CA [5:7608489883643564392:2533], CA [5:7608489883643564389:2530], CA [5:7608489883643564390:2531], trace_id# >> KqpBatchPEA::AbortState_AbortFromExecuterActor [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> VDiskIOTest::HugeBlobIOCount >> KqpComputeScheduler::AddUpdateQueries+DefaultFairShareMode [GOOD] >> KqpComputeScheduler::AddUpdatePools+DefaultFairShareMode [GOOD] >> KqpComputeScheduler::AddUpdatePools-DefaultFairShareMode [GOOD] >> KqpComputeScheduler::AddUpdateDeleteNonExistent [GOOD] >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> KqpComputeScheduler::AddUpdateDeleteNonExistent [GOOD] >> KqpBatchPEA::ExecuteState_ChildExecuterRetryLimitExceeded [GOOD] >> KqpBatchPEA::ExecuteState_ChildExecuterInternalError >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TKqpScanData::EmptyColumns [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchPEA::AbortState_AbortFromExecuterActor [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:26:30.904519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:31.019684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:31.036873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:26:31.036950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:26:31.037461Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:26:31.330496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:31.330596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:31.394469Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489587999396 != 1771489587999400 2026-02-19T08:26:31.403834Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:31.449996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:31.573545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:31.912378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:31.912454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:31.912513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:31.912898Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:31.925390Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:33.288808Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.288926Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:33.289176Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:26:33.289251Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:33.289475Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715661. Resolved key sets: 1 2026-02-19T08:26:33.289700Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:33.289912Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value0"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '0)) $1) (AsStruct '('"Key" (Uint32 '1)) $1) (AsStruct '('"Key" (Uint32 '2)) $1) (AsStruct '('"Key" (Uint32 '3)) $1) (AsStruct '('"Key" (Uint32 '4)) $1)))) )))) ) 2026-02-19T08:26:33.289965Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:26:33.290159Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:26:33.290587Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1226:2780] 2026-02-19T08:26:33.290674Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1226:2780], channels: 0 2026-02-19T08:26:33.290755Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:33.290804Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:33.290853Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1226:2780] 2026-02-19T08:26:33.290896Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. 
Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1226:2780], channels: 0 2026-02-19T08:26:33.290975Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1226:2780], trace_id# 2026-02-19T08:26:33.291031Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1226:2780], trace_id# 2026-02-19T08:26:33.291091Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:33.303646Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1226:2780] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:33.303762Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1226:2780], trace_id# 2026-02-19T08:26:33.303823Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1226:2780], trace_id# 2026-02-19T08:26:33.305064Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1226:2780] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1926 Tasks { TaskId: 1 CpuTimeUs: 1101 FinishTimeMs: 1771489593304 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 42 BuildCpuTimeUs: 1059 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489593291 UpdateTimeMs: 1771489593304 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:33.305141Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWUzNzBhNTEtN2JkNjIwOTgtZGZmMzQzMWEtODUwZjM3YTg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1226:2780] 2026-02-19T08:26:33.305246Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1223:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg41sk7p8e6ek1z656awtf, Database: /Root, SessionId: yd ... EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1320:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:39.064994Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1320:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:39.065090Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1316:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:39.065148Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1316:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:39.065228Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1316:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1313:3053] trace_id# 2026-02-19T08:26:39.065275Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1316:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:39.065302Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1316:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:39.065372Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1322:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:39.065411Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1322:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:39.065481Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1322:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1313:3053] trace_id# 2026-02-19T08:26:39.065528Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1322:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:39.065562Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1322:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:39.065618Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1318:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:39.065675Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1318:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:39.065802Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1318:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1313:3053] trace_id# 2026-02-19T08:26:39.065851Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1318:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:39.065895Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1318:3053] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:39.066068Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1313:3053], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1320:3053] PartitionIndex# 2 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:26:39.066138Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1313:3053], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 3 TotalPartitions# 4 2026-02-19T08:26:39.066295Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1313:3053], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Got tx response. Sender# [2:1316:3053] PartitionIndex# 0 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:26:39.066329Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1313:3053], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 2 TotalPartitions# 4 2026-02-19T08:26:39.066432Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1313:3053], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Got tx response. Sender# [2:1322:3053] PartitionIndex# 3 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:26:39.066475Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1313:3053], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 1 TotalPartitions# 4 2026-02-19T08:26:39.066585Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1313:3053], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Got tx response. Sender# [2:1318:3053] PartitionIndex# 1 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:26:39.066676Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [2:1313:3053], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# ABORTED Issues# [ {
: Error: Test abort before any response } {
: Error: while executing BATCH UPDATE query } ] 2026-02-19T08:26:39.067208Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:39.067542Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:1326:3053] TxId: 281474976715669. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:1325:3053] trace_id# 2026-02-19T08:26:39.067764Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1326:3053] TxId: 281474976715669. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:39.067821Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1326:3053] TxId: 281474976715669. Ctx: { TraceId: 01khtg47ejbarcp4jxrdgs4ngg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=OTc0NDQ1MDEtZjNhMTFjZWUtNzMyNGMyYWUtMjRiOTVmODM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> KqpComputeScheduler::SingleDatabasePoolQueryStructure+DefaultFairShareMode [GOOD] >> KqpComputeScheduler::QueriesWithFairShareNoOverlimit [GOOD] >> KqpComputeScheduler::QueriesWithFairShareOverlimit [GOOD] >> KqpComputeScheduler::QueriesWithFairShareEqualAll [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query [GOOD] Test command err: 2026-02-19T08:22:40.817734Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488870220101334:2195];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.817859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:40.851159Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:22:41.115229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.140396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.140493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.202783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.267310Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:41.269479Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488870220101158:2081] 1771489360799883 != 1771489360799886 
2026-02-19T08:22:41.339939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f3f/r3tmp/yandexhXI0Yd.tmp 2026-02-19T08:22:41.339980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f3f/r3tmp/yandexhXI0Yd.tmp 2026-02-19T08:22:41.352890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f3f/r3tmp/yandexhXI0Yd.tmp 2026-02-19T08:22:41.353227Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:41.355959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:41.370901Z INFO: TTestServer started on Port 17842 GrpcPort 15862 PQClient connected to localhost:15862 2026-02-19T08:22:41.566211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.581801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:22:41.634838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:41.820943Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:44.023034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488887399971394:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.023121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488887399971385:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.023242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.025221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488887399971402:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.025354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.028399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:44.042691Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488887399971400:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:22:44.271834Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488887399971466:2641] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:44.304596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.346355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.429267Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488887399971474:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:44.431687Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=OTkzYzk2YTktZjViNjJmMzMtOTlmNzNkNGQtOWEwOTdhYjA=, ActorId: [1:7608488887399971380:2335], ActorState: ExecuteState, LegacyTraceId: 01khtfx21n73ckv03phr477bdt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:44.434180Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:22:44.436418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608488887399971750:2819] 2026-02-19T08:22:45.818256Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488870220101334:2195];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.818322Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-02-19T08:22:50.926492Z :WriteToTopic_Demo_16_Table INFO: TTopicSdkTestSetup started 2026-02-19T08:22:50.964812Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:22:50.995007Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608488913169775755:2928] connected; active server actors: 1 2026-02-19T08:22:50.995239Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-02-19T08:22:51.006368Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-02-19T08:22:51.006558Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-02-19T08:22:51.027724Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:51.028854Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:22:51.029087Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-02-19T08:22:51.029133Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:22:51.029171Z node 1 :PERSQUEUE INFO: pq_impl.cpp:61 ... ingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.585289Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.585311Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.585341Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:36.620020Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:36.620054Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.620067Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.620089Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.620107Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:36.685477Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:36.685518Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.685551Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.685575Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.685595Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:36.720352Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:36.720393Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.720407Z node 15 :PERSQUEUE DEBUG: 
partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.720423Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.720437Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:36.785920Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:36.785974Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.785995Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.786020Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.786039Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:36.820706Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:36.820749Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.820769Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.820791Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.820808Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:36.856875Z :INFO: [/Root] [/Root] [b86cec78-fb09b4b1-358d758e-3d9e167e] Closing read session. Close timeout: 0.000000s 2026-02-19T08:26:36.856925Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2026-02-19T08:26:36.856979Z :INFO: [/Root] [/Root] [b86cec78-fb09b4b1-358d758e-3d9e167e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2002 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:26:36.857096Z :NOTICE: [/Root] [/Root] [b86cec78-fb09b4b1-358d758e-3d9e167e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:26:36.857146Z :DEBUG: [/Root] [/Root] [b86cec78-fb09b4b1-358d758e-3d9e167e] [] Abort session to cluster 2026-02-19T08:26:36.857668Z :NOTICE: [/Root] [/Root] [b86cec78-fb09b4b1-358d758e-3d9e167e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:26:36.858245Z node 15 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_15_1_5184935083750423912_v1 grpc read done: success# 0, data# { } 2026-02-19T08:26:36.858288Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_15_1_5184935083750423912_v1 grpc read failed 2026-02-19T08:26:36.858326Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_15_1_5184935083750423912_v1 grpc closed 2026-02-19T08:26:36.858383Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_15_1_5184935083750423912_v1 is DEAD 2026-02-19T08:26:36.858969Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037894] Destroy direct read session test-consumer_15_1_5184935083750423912_v1 2026-02-19T08:26:36.859016Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608489873256809690:2532] destroyed 2026-02-19T08:26:36.859063Z node 15 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_15_1_5184935083750423912_v1 2026-02-19T08:26:36.859423Z node 15 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [15:7608489873256809687:2529] disconnected. 2026-02-19T08:26:36.859463Z node 15 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [15:7608489873256809687:2529] disconnected; active server actors: 1 2026-02-19T08:26:36.859506Z node 15 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [15:7608489873256809687:2529] client test-consumer disconnected session test-consumer_15_1_5184935083750423912_v1 2026-02-19T08:26:36.860668Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0] PartitionId [0] Generation [2] Write session: close. 
Timeout 0.000000s 2026-02-19T08:26:36.860787Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0] PartitionId [0] Generation [2] Write session will now close 2026-02-19T08:26:36.860833Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0] PartitionId [0] Generation [2] Write session: aborting 2026-02-19T08:26:36.861197Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-02-19T08:26:36.861254Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0] PartitionId [0] Generation [2] Write session: destroy 2026-02-19T08:26:36.861701Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0 grpc read done: success: 0 data: 2026-02-19T08:26:36.861755Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0 grpc read failed 2026-02-19T08:26:36.862144Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 5 sessionId: test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0 2026-02-19T08:26:36.862171Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|8af80461-a007fb7d-56a8fc37-2e30311d_0 is DEAD 2026-02-19T08:26:36.862580Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:36.863144Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608489868961842331:2508] destroyed 2026-02-19T08:26:36.863219Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:26:36.863277Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:36.863305Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.863328Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.863378Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.863403Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:36.886214Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:36.886266Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.886284Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.886304Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.886320Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:36.921843Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:36.921882Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.921902Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:36.921924Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:36.921952Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: 2026-02-19T08:26:28.431351Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489847939423278:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:28.434294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:28.631997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:28.657530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:28.657684Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:28.686035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:28.722710Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:28.724517Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489847939423247:2081] 1771489588429424 != 1771489588429427 2026-02-19T08:26:28.792378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:28.792407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:28.792417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:28.792528Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:28.857027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:31.471879Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/000616/r3tmp/spilling-tmp-runner/node_1_29ca78ba-dc04cb6c-cc013318-126fff0, actor: [1:7608489860824326973:2384] 2026-02-19T08:26:31.472077Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/000616/r3tmp/spilling-tmp-runner 2026-02-19T08:26:33.639679Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639716Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639725Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639734Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639744Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639754Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639763Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639774Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639785Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639797Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639806Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:33.639858Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:108} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-02-19T08:26:33.639892Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:142} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-02-19T08:26:33.639939Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 '('"id" (Just (Int64 '0)))) (let $2 (Uint64 '0)) (let $3 '('"k2" (Nothing (OptionalType (DataType 'Uint64))))) (let $4 (Just $2)) (let $5 (AsStruct $1 '('"k1" $2) $3 '('"v1" $4) '('"v2" (Just (String '"0NULL"))))) (let $6 '('"id" (Just (Int64 '2)))) (let $7 (Uint64 '1)) (let $8 '('"k1" $7)) (let $9 (Just $7)) (let $10 '('"v1" $9)) (let $11 (AsStruct $6 $8 $3 $10 '('"v2" (Just (String '"1NULL"))))) (let $12 '('"k2" $4)) (let $13 (AsStruct $1 $8 $12 $10 '('"v2" (Just (String '10))))) (let $14 '('"id" (Just (Int64 '1)))) (let $15 '('"k2" $9)) (let $16 (Uint64 '2)) (let $17 (Just $16)) (let $18 '('"v1" $17)) (let $19 (AsStruct $14 $8 $15 $18 '('"v2" (Just (String '11))))) (let $20 '('"k2" $17)) (let $21 (Uint64 '3)) (let $22 (Just $21)) (let $23 '('"v1" $22)) (let $24 (AsStruct $6 $8 $20 $23 '('"v2" (Just (String '12))))) (let $25 '('"k2" $22)) (let $26 (Uint64 '4)) (let $27 (Just $26)) (let $28 '('"v1" $27)) (let $29 (AsStruct $1 $8 $25 $28 '('"v2" (Just (String '13))))) (let $30 '('"k2" $27)) (let $31 (Uint64 '5)) (let $32 (Just $31)) (let $33 '('"v1" $32)) (let $34 (AsStruct $14 $8 $30 $33 '('"v2" (Just (String '"14"))))) (let $35 '('"k1" $16)) (let $36 (AsStruct $14 $35 $3 $18 '('"v2" (Just (String '"2NULL"))))) (let $37 (AsStruct $6 $35 $12 $18 '('"v2" (Just (String '"20"))))) (let $38 (AsStruct $1 $35 $15 $23 '('"v2" (Just (String '"21"))))) (let $39 (AsStruct $14 $35 $20 $28 '('"v2" (Just (String '"22"))))) (let $40 (AsStruct $6 $35 $25 $33 '('"v2" (Just (String '"23"))))) (let $41 (Uint64 '6)) (let $42 '('"v1" (Just $41))) (let $43 (AsStruct $1 $35 $30 $42 '('"v2" (Just (String '"24"))))) (let $44 '('"k1" $21)) (let $45 (AsStruct $1 $44 $3 $23 '('"v2" (Just (String '"3NULL"))))) (let $46 (AsStruct $14 $44 $12 $23 '('"v2" (Just (String '"30"))))) (let $47 (AsStruct $6 $44 $15 $28 '('"v2" (Just (String '"31"))))) (let $48 (AsStruct $1 $44 $20 $33 '('"v2" (Just (String '"32"))))) (let $49 (AsStruct $14 $44 $25 $42 '('"v2" (Just (String '"33"))))) (let $50 (Uint64 '7)) (let $51 '('"v1" (Just $50))) (let $52 (AsStruct $6 $44 $30 $51 '('"v2" (Just (String '"34"))))) (let $53 '('"k1" $26)) (let $54 (AsStruct $6 $53 $3 $28 '('"v2" (Just (String '"4NULL"))))) (let $55 (AsStruct $1 $53 $12 $28 '('"v2" (Just (String '"40"))))) (let $56 (AsStruct $14 $53 $15 $33 '('"v2" (Just (String '"41"))))) (let $57 (AsStruct $6 $53 $20 $42 '('"v2" (Just (String '"42"))))) (let $58 (AsStruct $1 $53 $25 $51 '('"v2" (Just (String '"43"))))) (let $59 (Uint64 '8)) (let $60 '('"v1" (Just $59))) (let $61 (AsStruct $14 $53 $30 $60 '('"v2" (Just (String '"44"))))) (let $62 '('"k1" $31)) (let $63 (AsStruct $14 $62 $3 $33 '('"v2" (Just (String '"5NULL"))))) (let $64 (AsStruct $6 $62 $12 $33 '('"v2" (Just (String '"50"))))) (let $65 (AsStruct $1 $62 $15 $42 '('"v2" (Just (String '"51"))))) (let $66 (AsStruct $14 $62 $20 $51 '('"v2" (Just (String '"52"))))) (let $67 (AsStruct $6 $62 $25 $60 '('"v2" (Just (String '"53"))))) (let $68 (Uint64 
'9)) (let $69 '('"v1" (Just $68))) (let $70 (AsStruct $1 $62 $30 $69 '('"v2" (Just (String '"54"))))) (let $71 '('"k1" $41)) (let $72 (AsStruct $1 $71 $3 $42 '('"v2" (Just (String '"6NULL"))))) (let $73 (AsStruct $14 $71 $12 $42 '('"v2" (Just (String '"60"))))) (let $74 (AsStruct $6 $71 $15 $51 '('"v2" (Just (String '"61"))))) (let $75 (AsStruct $1 $71 $20 $60 '('"v2" (Just (String '"62"))))) (let $76 (AsStruct $14 $71 $25 $69 '('"v2" (Just (String '"63"))))) (let $77 '('"v1" (Just (Uint64 '10)))) (let $78 (AsStruct $6 $71 $30 $77 '('"v2" (Just (String '"64"))))) (let $79 '('"v1" (Just (Uint64 '11)))) (let $80 (AsStruct $1 $71 '('"k2" $32) $79 '('"v2" (Just (String '"65"))))) (let $81 '('"k1" $50)) (let $82 (AsStruct $6 $81 $3 $51 '('"v2" (Just (String '"7NULL"))))) (let $83 (AsStruct $1 $81 $12 $51 '('"v2" (Just (String '"70"))))) (let $84 (AsStruct $14 $81 $15 $60 '('"v2" (Just (String '"71"))))) (let $85 (AsStruct $6 $81 $20 $69 '('"v2" (Just (String '"72"))))) (let $86 (AsStruct $1 $81 $25 $77 '('"v2" (Just (String '"73"))))) (let $87 (AsStruct $14 $81 $30 $79 '('"v2" (Just (String '"74"))))) (let $88 '('"k1" $59)) (let $89 (AsStruct $14 $88 $3 $60 '('"v2" (Just (String '"8NULL"))))) (let $90 (AsStruct $6 $88 $12 $60 '('"v2" (Just (String '"80"))))) (let $91 (AsStruct $1 $88 $15 $69 '('"v2" (Just (String '"81"))))) (let $92 (AsStruct $14 $88 $20 $77 '('"v2" (Just (String '"82"))))) (let $93 (AsStruct $6 $88 $25 $79 '('"v2" (Just (String '"83"))))) (let $94 '('"v1" (Just (Uint64 '12)))) (let $95 (AsStruct $1 $88 $30 $94 '('"v2" (Just (String '"84"))))) (let $96 '('"k1" $68)) (let $97 (AsStruct $1 $96 $3 $69 '('"v2" (Just (String '"9NULL"))))) (let $98 (AsStruct $14 $96 $12 $69 '('"v2" (Just (String '"90"))))) (let $99 (AsStruct $6 $96 $15 $77 '('"v2" (Just (String '"91"))))) (let $100 (AsStruct $1 $96 $20 $79 '('"v2" (Just (String '"92"))))) (let $101 (AsStruct $14 $96 $25 $94 '('"v2" (Just (String '"93"))))) (let $102 (AsStruct $6 $96 $30 '('"v1" (Just (Uint64 '13))) '('"v2" (Just (String '"94"))))) (return (ToStream (Just (AsList $5 $11 $13 $19 $24 $29 $34 $36 $37 $38 $39 $40 $43 $45 $46 $47 $48 $49 $52 $54 $55 $56 $57 $58 $61 $63 $64 $65 $66 $67 $70 $72 $73 $74 $75 $76 $78 $80 $82 $83 $84 $85 $86 $87 $89 $90 $91 $92 $93 $95 $97 $98 $99 $100 $101 $102)))) )))) ) trace_id# 2026-02-19T08:26:33.639984Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:153} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Stage AST stage_id# [0,1] ast# ( (return (lambda '() (block '( (let $1 '('"Col1" (Just (Uint32 '0)))) (let $2 '('"Col2" (Just (Uint64 '1)))) (let $3 (Just (Int64 '0))) (let $4 '('"Col3" $3)) (let $5 (Just (Int64 '3))) (let $6 (AsStruct $1 $2 $4 '('"Col4" $5))) (let $7 '('"Col1" (Just (Uint32 '1)))) (let $8 (Just (Int64 '1))) (let $9 '('"Col4" $8)) (let $10 (AsStruct $7 $2 $4 $9)) (let $11 '('"Col3" $8)) (let $12 '('"Col4" $3)) (let $13 (AsStruct $7 $2 $11 $12)) (let $14 (Just (Int64 '2))) (let $15 '('"Col3" $14)) (let $16 (AsStruct $7 $2 $15 $9)) (let $17 '('"Col1" (Just (Uint32 '2)))) (let $18 '('"Col4" $14)) (let $19 (AsStruct $17 $2 $4 $18)) (let $20 '('"Col2" (Just (Uint64 '2)))) (let $21 (AsStruct $7 $20 $4 $9)) (let $22 (AsStruct $7 $20 $11 $12)) (let $23 (AsStruct $17 $20 $4 $9)) (let $24 '('"Col1" (Just (Uint32 '3)))) (let $25 (AsStruct $24 $20 $11 '('"Col4" (Just (Int64 '5))))) (let $26 '('"Col2" (Just (Uint64 '3)))) (let $27 (AsStruct $1 $26 $4 $9)) (let $28 (AsStruct $7 $26 '('"Col3" $5) $12)) (let $29 (AsStruct $17 $26 $4 $9)) (let $30 (AsStruct $1 $26 $15 '('"Col4" (Just (Int64 '4))))) (let $31 (AsStruct $7 $26 $11 $9)) (let $32 (AsStruct $17 $26 $11 $18)) (let $33 (AsStruct $24 ... shard_id# 72075186224037919 state# Executing trace_id# 2026-02-19T08:26:39.198753Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037921 state# Executing trace_id# 2026-02-19T08:26:39.198767Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037922 state# Executing trace_id# 2026-02-19T08:26:39.198776Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:26:39.198805Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 5 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037919 (Executing), DS 72075186224037921 (Executing), DS 72075186224037922 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:26:39.198825Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037919 status# COMPLETE error# trace_id# 2026-02-19T08:26:39.198854Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:26:39.198868Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037921 state# Executing trace_id# 2026-02-19T08:26:39.198876Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037922 state# Executing trace_id# 2026-02-19T08:26:39.198884Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:26:39.198904Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 4 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037921 (Executing), DS 72075186224037922 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:26:39.198925Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037921 status# COMPLETE error# trace_id# 2026-02-19T08:26:39.198940Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. 
Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:26:39.198954Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037922 state# Executing trace_id# 2026-02-19T08:26:39.198971Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:26:39.198985Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 3 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037922 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:26:39.199003Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037922 status# COMPLETE error# trace_id# 2026-02-19T08:26:39.199023Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:26:39.199032Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037923 state# Executing trace_id# 2026-02-19T08:26:39.199062Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 2 datashard(s): DS 72075186224037924 (Executing), DS 72075186224037923 (Executing), trace_id# 2026-02-19T08:26:39.199082Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037923 status# COMPLETE error# trace_id# 2026-02-19T08:26:39.199101Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:148} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Datashard not finished yet actor_state# ExecuteState shard_id# 72075186224037924 state# Executing trace_id# 2026-02-19T08:26:39.199118Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037924 (Executing), trace_id# 2026-02-19T08:26:39.199141Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1414} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037924 status# COMPLETE error# trace_id# 2026-02-19T08:26:39.199229Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:39.199249Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:39.199271Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:7608489896911633329:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg465decysr4mn7hv23mv0, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NmMxYTQwNzAtODI5YjhhNGQtZDhhNmI2ODctMjBjZDhmNmM=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> KqpComputeScheduler::ZeroQueries [GOOD] >> KqpComputeScheduler::ZeroLimits [GOOD] >> KqpComputeScheduler::ZeroWeightDatabasePoolQuery [GOOD] >> KqpComputeSchedulerService::EnabledSchedulerWithDisabledWorkloadManager |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> KqpComputeScheduler::QueriesWithFairShareEqualAll [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] >> KqpComputeScheduler::WeightedPools+DefaultFairShareMode [GOOD] >> KqpComputeScheduler::WeightedPools-DefaultFairShareMode [GOOD] >> KqpComputeScheduler::WeightedQueries [GOOD] >> KqpComputeScheduler::ZeroLimitDbWithNonZeroPools [GOOD] >> KqpComputeScheduler::DemandIsCutOffByLimit [GOOD] >> KqpComputeScheduler::AddUpdateQueries-DefaultFairShareMode [GOOD] >> KqpComputeScheduler::DeleteQueries+DefaultFairShareMode [GOOD] >> KqpComputeScheduler::DeleteQueries-DefaultFairShareMode [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> KqpComputeScheduler::DeleteQueries-DefaultFairShareMode [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> KqpComputeScheduler::ZeroLimitDbWithNonZeroPools [GOOD] >> KqpComputeSchedulerService::FeatureFlagOnStart+Enabled >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> TKqpScanData::UnboxedValueSize >> TKqpScanData::UnboxedValueSize [GOOD] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TKqpScanFetcher::ScanDelayedRetry >> TKqpScanFetcher::ScanDelayedRetry [GOOD] >> KqpComputeScheduler::SingleDatabasePoolQueryStructure-DefaultFairShareMode [GOOD] >> KqpComputeScheduler::WeightedDatabase+DefaultFairShareMode [GOOD] >> KqpComputeScheduler::WeightedDatabase-DefaultFairShareMode [GOOD] >> KqpComputeScheduler::StressTest >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanFetcher::ScanDelayedRetry [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=535;kqp_scan_fetcher_actor.cpp:50 :META:Reads { ShardId: 1001001 KeyRanges { } } 2026-02-19T08:26:43.115762Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2026-02-19T08:26:43.115834Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2026-02-19T08:26:43.126248Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2026-02-19T08:26:43.126336Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. 
TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #2 (total 2) schedule after 0.250000s >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] >> TExternalDataSourceTest::ReadOnlyMode >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:59.498139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:59.566010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:59.570068Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:59.570298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:59.570403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:59.837985Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:59.914503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:59.914628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:59.972478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:00.087599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:00.732425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:00.732466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:00.732489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:00.734733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:00.735069Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:00.798274Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:01.286267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:26:03.785432Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:03.790335Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:26:03.792949Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:03.814253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:26:03.814340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:03.851806Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:26:03.853368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:04.039637Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:04.039728Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:04.040914Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.041503Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.042247Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.042642Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.042802Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.042882Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.043034Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.043157Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.043235Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.057786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:04.219386Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:04.257394Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:26:04.257497Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:26:04.293476Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:26:04.293659Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:26:04.293842Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:26:04.293906Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:04.293946Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:26:04.294040Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:04.294083Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:04.294122Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:04.294546Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:04.299098Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:04.299218Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2137:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:04.321995Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2182:2627] 2026-02-19T08:26:04.322811Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2182:2627], schemeshard id = 72075186224037897 2026-02-19T08:26:04.384884Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2267:2669] 2026-02-19T08:26:04.386752Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:26:04.396458Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Describe result: PathErrorUnknown 2026-02-19T08:26:04.396524Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Creating table 2026-02-19T08:26:04.396602Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:26:04.401542Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2325:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:04.404771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:04.411752Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:26:04.411888Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:26:04.423845Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:26:04.638940Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:26:04.658412Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:26:04.953146Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:26:05.082705Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:05.082790Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2274:2674] Owner: [2:2273:2673]. Column diff is empty, finishing 2026-02-19T08:26:05.780578Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... oaded traversal table owner id: 72075186224037897 2026-02-19T08:26:41.319384Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-02-19T08:26:41.319413Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1771489601194542 2026-02-19T08:26:41.319443Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-02-19T08:26:41.319469Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-02-19T08:26:41.319494Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-02-19T08:26:41.319563Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-02-19T08:26:41.319645Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:41.319758Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-02-19T08:26:41.319825Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:41.319891Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:41.319959Z node 2 :STATISTICS DEBUG: tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:41.320105Z node 2 :STATISTICS DEBUG: tx_init.cpp:300: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:41.321221Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:41.321419Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5573:4776] Owner: [2:5572:4775]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:41.321463Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5573:4776] Owner: [2:5572:4775]. 
Column diff is empty, finishing 2026-02-19T08:26:41.322070Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:41.322161Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:41.323302Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:41.323384Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:41.326112Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:41.356595Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5582:4783] 2026-02-19T08:26:41.356758Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5538:4755], server id = [2:5582:4783], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:41.357036Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5582:4783], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-02-19T08:26:41.357498Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5583:4784] 2026-02-19T08:26:41.357703Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5583:4784], schemeshard id = 72075186224037897 2026-02-19T08:26:41.409145Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:41.409323Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-02-19T08:26:41.410286Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5588:4789], server id = [2:5592:4793], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:41.410826Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5588:4789], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:41.412095Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5589:4790], server id = [2:5593:4794], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:41.412171Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5589:4790], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:41.412621Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5590:4791], server id = [2:5594:4795], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:41.412681Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5590:4791], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:41.413218Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5591:4792], server id = [2:5595:4796], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:41.413273Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5591:4792], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:41.419148Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 
2026-02-19T08:26:41.419878Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5588:4789], server id = [2:5592:4793], tablet id = 72075186224037899 2026-02-19T08:26:41.419926Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:41.420974Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:41.421375Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5589:4790], server id = [2:5593:4794], tablet id = 72075186224037900 2026-02-19T08:26:41.421407Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:41.422121Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:41.422410Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5590:4791], server id = [2:5594:4795], tablet id = 72075186224037901 2026-02-19T08:26:41.422441Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:41.422813Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:41.422870Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:41.423057Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:41.423210Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:41.423594Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5608:4805], ActorId: [2:5609:4806], Starting query actor #1 [2:5610:4807] 2026-02-19T08:26:41.423660Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5609:4806], ActorId: [2:5610:4807], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:41.426585Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5591:4792], server id = [2:5595:4796], tablet id = 72075186224037902 2026-02-19T08:26:41.426621Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:41.427383Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5609:4806], ActorId: [2:5610:4807], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZWZhZGI3NDMtYjE0ZGM4ZjctMjhiNWYyZS1mNGJiMWRlMg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:41.537399Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5619:4816]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:41.537770Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:41.537826Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5619:4816], StatRequests.size() = 1 2026-02-19T08:26:41.687611Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5609:4806], ActorId: [2:5610:4807], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWZhZGI3NDMtYjE0ZGM4ZjctMjhiNWYyZS1mNGJiMWRlMg==, TxId: 2026-02-19T08:26:41.687699Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5609:4806], ActorId: [2:5610:4807], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWZhZGI3NDMtYjE0ZGM4ZjctMjhiNWYyZS1mNGJiMWRlMg==, TxId: 2026-02-19T08:26:41.688123Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5608:4805], ActorId: [2:5609:4806], Got response [2:5610:4807] SUCCESS 2026-02-19T08:26:41.688574Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:41.704023Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:41.704101Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-02-19T08:26:41.794341Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5638:4824]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:41.794772Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:41.794828Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:41.795181Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:41.795220Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:41.795277Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:41.799054Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> KqpBatchDelete::Large_1 [GOOD] >> KqpBatchDelete::HasTxControl >> AnalyzeDatashard::TrickyTableAndColumnNames [GOOD] >> test_sql_negative.py::test[watermarks-bad_column-default.txt] >> KqpComputeSchedulerService::EnabledSchedulerWithDisabledWorkloadManager [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:42.303953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:42.381276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:42.385481Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:42.385818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:42.385996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.724176Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.808180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.808286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.864604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.910952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.584513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.584569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.584602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.591622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.592195Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.658593Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:44.211033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:47.299789Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:47.307174Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:47.310064Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:47.333311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:47.333401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.371961Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:47.374096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.556515Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:47.556644Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.558330Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.558941Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.559565Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.559994Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.560317Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.560485Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.560709Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.560842Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.560922Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.576398Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.775805Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:47.825673Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:47.825818Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:47.862855Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:47.863036Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:47.863193Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:47.863241Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:47.863285Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:47.863325Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:47.863362Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:47.863397Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:47.863770Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:47.867830Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.867932Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2137:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.888595Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2185:2629] 2026-02-19T08:25:47.889611Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2185:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:47.953489Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2265:2669] 2026-02-19T08:25:47.955653Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:47.964691Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.964755Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:25:47.964850Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:47.971484Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.975430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.982392Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.982525Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.995726Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:48.212150Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:48.248895Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:48.507003Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:48.644457Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:48.644527Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:25:49.389555Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ry_actor.h:311: [TQueryRetryActor] OwnerId: [2:4573:3933], ActorId: [2:4576:3935], Got response [2:4579:3937] SUCCESS 2026-02-19T08:26:30.279053Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:30.292458Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:30.292564Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:56: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId "operationId" , ActorId=[1:3365:3494] 2026-02-19T08:26:30.336892Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:26:30.336978Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:26:30.412078Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:4405:3803], schemeshard count = 1 2026-02-19T08:26:31.449486Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:26:31.449746Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:697: [72075186224037894] ScheduleNextAnalyze. operationId: "operationId", started analyzing table: [OwnerId: 72075186224037897, LocalPathId: 35], AnalyzeActorId: [2:4635:3974] 2026-02-19T08:26:31.453467Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4645:3978], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:26:31.456688Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4645:3978], RunStreamQuery with text: SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table2` 2026-02-19T08:26:31.456809Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4645:3978], Start read next stream part 2026-02-19T08:26:31.499232Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4662:3988]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:31.499491Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:26:31.499542Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 4, ReplyToActorId = [2:4662:3988], StatRequests.size() = 1 2026-02-19T08:26:38.950565Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4645:3978], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:26:38.950713Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4645:3978], Start read next stream part 2026-02-19T08:26:38.951893Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4645:3978], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:26:38.951942Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4645:3978], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODI4M2U3NjYtZGMxYTc3ZTMtMjZmZGViNzgtN2MyYWUzNDk=, TxId: 2026-02-19T08:26:38.952020Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33090, txId: 281474976720666] shutting down 2026-02-19T08:26:38.952085Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4736:4045], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:26:38.955794Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4736:4045], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table2` 2026-02-19T08:26:38.955924Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4736:4045], Start read next stream part 2026-02-19T08:26:39.009933Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4754:4060]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:39.010172Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-02-19T08:26:39.010217Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 5, ReplyToActorId = [2:4754:4060], StatRequests.size() = 1 2026-02-19T08:26:43.161530Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4736:4045], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:26:43.161678Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4736:4045], Start read next stream part 2026-02-19T08:26:43.163210Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4818:4108], ActorId: [2:4819:4109], Starting query actor #1 [2:4820:4110] 2026-02-19T08:26:43.163279Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4819:4109], ActorId: [2:4820:4110], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:43.166005Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4736:4045], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:26:43.166072Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4635:3974], ActorId: [2:4736:4045], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTBlYTI1OGYtNGYwMzk1YjQtMTEzYjU2MWQtNjM0OTM4ZDk=, TxId: 2026-02-19T08:26:43.166181Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33110, txId: 281474976720669] shutting down 2026-02-19T08:26:43.166832Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4819:4109], ActorId: [2:4820:4110], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OTc3NzBhZGQtOTQ0ZjA2ZTktNDMxZDhhM2MtZDE2YTRmYWY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:43.206461Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4819:4109], ActorId: [2:4820:4110], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTc3NzBhZGQtOTQ0ZjA2ZTktNDMxZDhhM2MtZDE2YTRmYWY=, TxId: 2026-02-19T08:26:43.206536Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4819:4109], ActorId: [2:4820:4110], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTc3NzBhZGQtOTQ0ZjA2ZTktNDMxZDhhM2MtZDE2YTRmYWY=, TxId: 2026-02-19T08:26:43.206971Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4818:4108], ActorId: [2:4819:4109], Got response [2:4820:4110] SUCCESS 2026-02-19T08:26:43.207281Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:43.223434Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2026-02-19T08:26:43.223517Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3365:3494] 2026-02-19T08:26:43.224293Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:4851:4131]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:26:43.224734Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-02-19T08:26:43.224805Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:43.225109Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-02-19T08:26:43.225150Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 6 ], Database[ Root/Database ] 2026-02-19T08:26:43.225216Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:43.225517Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:26:43.230259Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-02-19T08:26:43.231466Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 6 2026-02-19T08:26:43.231658Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 6 2026-02-19T08:26:43.232263Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 7 ], ReplyToActorId[ [2:4865:4145]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-02-19T08:26:43.232556Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2026-02-19T08:26:43.232614Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:43.232863Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2026-02-19T08:26:43.232903Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 7 ], Database[ Root/Database ] 2026-02-19T08:26:43.232963Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:43.233226Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-02-19T08:26:43.235474Z node 2 :STATISTICS WARN: database.cpp:263: [ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-02-19T08:26:43.235832Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 7 2026-02-19T08:26:43.237410Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 7 >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] Test command err: 2026-02-19T08:22:26.164015Z node 1 :METADATA_PROVIDER 
WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488809520349454:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:26.164493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:26.199831Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:22:26.359259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:26.377811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:26.377931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:26.440148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:26.443419Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:26.444742Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488809520349422:2081] 1771489346162406 != 1771489346162409 2026-02-19T08:22:26.522459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f44/r3tmp/yandexxyFCUQ.tmp 2026-02-19T08:22:26.522485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f44/r3tmp/yandexxyFCUQ.tmp 2026-02-19T08:22:26.522636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f44/r3tmp/yandexxyFCUQ.tmp 2026-02-19T08:22:26.522729Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:26.544212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:26.549175Z INFO: TTestServer started on Port 15475 GrpcPort 16218 PQClient connected to localhost:16218 2026-02-19T08:22:26.742529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:26.804897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:27.171579Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:28.377057Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488818110285066:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.377110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488818110285048:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.377279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.378590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488818110285072:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.378668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:28.381542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:28.391092Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488818110285070:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:22:28.576100Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488818110285136:2643] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:28.596233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:28.623409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:28.684455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:28.693128Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488818110285144:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:28.694649Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZTc0NzQ2Yy00ZTc4M2YwYi0yN2IxNWE1Yy1kOTIzOWQ3Ng==, ActorId: [1:7608488818110285037:2333], ActorState: ExecuteState, LegacyTraceId: 01khtfwjrqecw4awwrat95whyx, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:28.696537Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608488818110285427:2820] 2026-02-19T08:22:31.164108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488809520349454:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:31.164212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-02-19T08:22:34.746024Z :Sinks_Oltp_WriteToTopicAndTable_1_Table INFO: TTopicSdkTestSetup started 2026-02-19T08:22:34.757219Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:22:34.769284Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608488843880089419:2922] connected; active server actors: 1 2026-02-19T08:22:34.769766Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-02-19T08:22:34.773675Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-02-19T08:22:34.773814Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-02-19T08:22:34.776395Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:34.777103Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:22:34.777264Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-02-19T08:22:34.777296Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:22:34.777336Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72075186224037892] doesn't have tx writes info 2026-02-19T08:22:34.777508Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037892] server connected, pipe [1: ... 6Z node 15 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037897][topic_B] pipe [15:7608489880568983495:3324] client test-consumer disconnected session test-consumer_15_2_14510192613615930277_v1 2026-02-19T08:26:41.641975Z :DEBUG: [/Root] [/Root] [eaa5a69a-74144eb0-1a9c1880-23dcda88] [] Abort session to cluster 2026-02-19T08:26:41.640622Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037896] Destroy direct read session test-consumer_15_2_14510192613615930277_v1 2026-02-19T08:26:41.640652Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037896] server disconnected, pipe [15:7608489880568983498:3327] destroyed 2026-02-19T08:26:41.640689Z node 15 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_15_2_14510192613615930277_v1 2026-02-19T08:26:41.642601Z node 15 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_15_1_18387305545004685447_v1 grpc read done: success# 0, data# { } 2026-02-19T08:26:41.642625Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_15_1_18387305545004685447_v1 grpc read failed 2026-02-19T08:26:41.642647Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_15_1_18387305545004685447_v1 grpc closed 2026-02-19T08:26:41.642675Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_15_1_18387305545004685447_v1 is DEAD 2026-02-19T08:26:41.643570Z node 15 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [15:7608489871979048820:3290] disconnected. 
2026-02-19T08:26:41.643599Z node 15 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [15:7608489871979048820:3290] disconnected; active server actors: 1 2026-02-19T08:26:41.643618Z node 15 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [15:7608489871979048820:3290] client test-consumer disconnected session test-consumer_15_1_18387305545004685447_v1 2026-02-19T08:26:41.643697Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037894] Destroy direct read session test-consumer_15_1_18387305545004685447_v1 2026-02-19T08:26:41.643726Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608489871979048823:3293] destroyed 2026-02-19T08:26:41.643765Z node 15 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_15_1_18387305545004685447_v1 2026-02-19T08:26:41.643778Z :NOTICE: [/Root] [/Root] [eaa5a69a-74144eb0-1a9c1880-23dcda88] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:26:41.645789Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-02-19T08:26:41.645837Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0] PartitionId [0] Generation [1] Write session will now close 2026-02-19T08:26:41.645891Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0] PartitionId [0] Generation [1] Write session: aborting 2026-02-19T08:26:41.646358Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:26:41.646394Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0] PartitionId [0] Generation [1] Write session: destroy 2026-02-19T08:26:41.646799Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0 grpc read done: success: 0 data: 2026-02-19T08:26:41.646823Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0 grpc read failed 2026-02-19T08:26:41.646997Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-02-19T08:26:41.647018Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0] PartitionId [0] Generation [1] Write session will now close 2026-02-19T08:26:41.647043Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0] PartitionId [0] Generation [1] Write session: aborting 2026-02-19T08:26:41.647295Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 7 sessionId: test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0 2026-02-19T08:26:41.647417Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:26:41.647334Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|ddb8e71b-e62c4f19-ad4e1f52-5baa0f5b_0 is DEAD 2026-02-19T08:26:41.647471Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0] PartitionId [0] Generation [1] Write session: destroy 2026-02-19T08:26:41.647780Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:41.647825Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:41.647952Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037896] server disconnected, pipe [15:7608489871979048643:3269] destroyed 2026-02-19T08:26:41.647996Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:26:41.648023Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:41.648042Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.648047Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0 grpc read done: success: 0 data: 2026-02-19T08:26:41.648056Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:41.648071Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0 grpc read failed 2026-02-19T08:26:41.648072Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.648088Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:26:41.648103Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0 grpc closed 2026-02-19T08:26:41.648122Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|92a8050e-d3fa080d-8f635f83-190868dd_0 is DEAD 2026-02-19T08:26:41.648133Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037896] server disconnected, pipe [15:7608489871979048646:3269] destroyed 2026-02-19T08:26:41.649432Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:41.649486Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:41.649671Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608489871979048600:3261] destroyed 2026-02-19T08:26:41.649784Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608489871979048603:3261] destroyed 2026-02-19T08:26:41.649830Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:26:41.649866Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:41.649892Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.649910Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:41.649930Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.649942Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:41.737432Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:41.737472Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.737491Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:41.737513Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.737531Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:41.737584Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:41.737607Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.737619Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:41.737632Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.737642Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:41.737673Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:41.737686Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.737697Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:41.737729Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:41.737738Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::TrickyTableAndColumnNames [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:26:04.329464Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:04.404160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:04.408528Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:26:04.408772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:26:04.408885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:26:04.696995Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:04.774732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:04.774877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:04.831824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:04.935586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:05.552081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:05.552460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:05.552488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:05.552512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:05.552905Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:05.614854Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:06.125653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:26:08.753740Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:08.759568Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:26:08.762373Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:08.786422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:26:08.786516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:08.835302Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:26:08.836985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:08.994944Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:08.995031Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:08.996106Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:08.996652Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:08.997185Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:08.997741Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:08.998003Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:08.998111Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:08.998289Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:08.998389Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:08.998542Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:09.013522Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:09.201165Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:09.250951Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:26:09.251065Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:26:09.288714Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:26:09.288930Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:26:09.289122Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:26:09.289181Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:09.289252Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:26:09.289328Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:09.289378Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:09.289435Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:09.289997Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:09.295221Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:09.295330Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:09.320387Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2180:2627] 2026-02-19T08:26:09.321244Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2180:2627], schemeshard id = 72075186224037897 2026-02-19T08:26:09.391369Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2265:2669] 2026-02-19T08:26:09.393565Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:26:09.405233Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:26:09.405291Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:26:09.405372Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:26:09.411442Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:09.415545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:09.423145Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:26:09.423277Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:26:09.466046Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:26:09.504128Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:26:09.652628Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:26:09.914835Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:26:10.043487Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:10.043563Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:26:10.732540Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... _analyze.cpp:112: [72075186224037894] TTxAnalyze::Complete 2026-02-19T08:26:20.941847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:26:20.941919Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:22.647134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:26:22.647208Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:35.764801Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-02-19T08:26:35.764887Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8557: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-02-19T08:26:35.764927Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8588: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-02-19T08:26:35.764965Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-02-19T08:26:38.055015Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8677: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-02-19T08:26:38.055080Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-02-19T08:26:38.055273Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 1 2026-02-19T08:26:38.068804Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-02-19T08:26:39.192175Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:26:39.192418Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:697: [72075186224037894] ScheduleNextAnalyze. 
operationId: "operationId", started analyzing table: [OwnerId: 72075186224037897, LocalPathId: 34], AnalyzeActorId: [2:3773:3378] 2026-02-19T08:26:39.195889Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:3773:3378], ActorId: [2:3783:3382], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:26:39.210292Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:3773:3378], ActorId: [2:3783:3382], RunStreamQuery with text: SELECT count(*),HLL(`key`),min(`key`),max(`key`),HLL(`val-Val`),min(`val-Val`),max(`val-Val`) FROM `/Root/Database/test\\Test\`test` 2026-02-19T08:26:39.210507Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3773:3378], ActorId: [2:3783:3382], Start read next stream part 2026-02-19T08:26:39.221405Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:3794:3388], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:39.221525Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:3807:3393], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:39.221667Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:39.222502Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:3811:3397], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:39.222620Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:39.228289Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:3828:3402], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:39.231219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:39.276150Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:3810:3396], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-02-19T08:26:39.404911Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:3890:3444], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:39.477663Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:3889:3443] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:39.743453Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3911:3457]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:39.743657Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:26:39.743716Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:3913:3459] 2026-02-19T08:26:39.743774Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:3913:3459] 2026-02-19T08:26:39.744057Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3914:3460] 2026-02-19T08:26:39.744136Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:3913:3459], server id = [2:3914:3460], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:39.744180Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3914:3460], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:39.744231Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:39.744313Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:39.744370Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:3911:3457], StatRequests.size() = 1 2026-02-19T08:26:39.744448Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:26:43.412986Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3773:3378], ActorId: [2:3783:3382], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:26:43.413295Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3773:3378], ActorId: [2:3783:3382], Start read next stream part 2026-02-19T08:26:43.414798Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33110, txId: 281474976720660] shutting down 2026-02-19T08:26:43.415394Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:3952:3488], ActorId: [2:3956:3490], Starting query actor #1 [2:3957:3491] 2026-02-19T08:26:43.415479Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:3956:3490], ActorId: [2:3957:3491], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:43.418468Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3773:3378], ActorId: [2:3783:3382], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:26:43.418539Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3773:3378], ActorId: [2:3783:3382], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTBmMDhhNzQtZTlkYWEyZmYtNmU4OTVlMTEtNTY3OTE0ZQ==, TxId: 2026-02-19T08:26:43.419680Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:3956:3490], ActorId: [2:3957:3491], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YWQ4ZWJmZTgtZjkyNDRhYzMtZDk2M2Q2MzEtOThiNDZkOTY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:43.487643Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3970:3503]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:43.487869Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:43.487913Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:3970:3503], StatRequests.size() = 1 2026-02-19T08:26:43.665535Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:3956:3490], ActorId: [2:3957:3491], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWQ4ZWJmZTgtZjkyNDRhYzMtZDk2M2Q2MzEtOThiNDZkOTY=, TxId: 2026-02-19T08:26:43.665634Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3956:3490], ActorId: [2:3957:3491], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWQ4ZWJmZTgtZjkyNDRhYzMtZDk2M2Q2MzEtOThiNDZkOTY=, TxId: 2026-02-19T08:26:43.665981Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:3952:3488], ActorId: [2:3956:3490], Got response [2:3957:3491] SUCCESS 2026-02-19T08:26:43.666589Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:43.680067Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:43.680155Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:2894:3416] >> TxUsage::WriteToTopic_Demo_40_Query [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> KqpBatchPEA::ExecuteState_ChildExecuterInternalError [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> KqpComputeSchedulerService::EnabledSchedulerWithDisabledWorkloadManager [GOOD] Test command err: 2026-02-19T08:26:41.559225Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489901797917457:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:41.560635Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:41.760804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:41.783638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:41.783730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:41.840966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:41.877784Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:41.883710Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489901797917414:2081] 1771489601556367 != 1771489601556370 2026-02-19T08:26:41.930777Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:41.930828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:41.930837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:41.930932Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:41.942385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:42.130342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:42.565817Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:44.451629Z node 1 :KQP_COMPUTE_SCHEDULER INFO: kqp_compute_scheduler_service.cpp:49: Disabled on start 
2026-02-19T08:26:44.452666Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-02-19T08:26:44.452743Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:116: [WorkloadService] [Service] Resource pools was disabled 2026-02-19T08:26:44.455144Z node 1 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:79: Subscribed to config changes 2026-02-19T08:26:44.455176Z node 1 :KQP_COMPUTE_SCHEDULER INFO: kqp_compute_scheduler_service.cpp:86: Become enabled 2026-02-19T08:26:44.457024Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:304} SessionId: ydb://session/3?node_id=1&id=NDZlZTcwNmMtNjZkODIwZmQtYzE3ZTA5Y2UtYTNjMTdhM2I=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NDZlZTcwNmMtNjZkODIwZmQtYzE3ZTA5Y2UtYTNjMTdhM2I= temp_dir_name# fd8633f6-446f-2cbd-a012-10b80f9733eb trace_id# 2026-02-19T08:26:44.457701Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:309} SessionId: ydb://session/3?node_id=1&id=NDZlZTcwNmMtNjZkODIwZmQtYzE3ZTA5Y2UtYTNjMTdhM2I=, ActorId: [1:7608489914682820170:2329], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-02-19T08:26:44.464689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:26:44.489228Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489914682820216:2519], DatabaseId: Root, PoolId: zero_pool, Start pool creating 2026-02-19T08:26:44.492967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:44.494018Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489914682820216:2519], DatabaseId: Root, PoolId: zero_pool, Subscribe on create pool tx: 281474976715659 2026-02-19T08:26:44.494189Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489914682820216:2519], DatabaseId: Root, PoolId: zero_pool, Tablet to pipe successfully connected 2026-02-19T08:26:44.518081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489914682820216:2519], DatabaseId: Root, PoolId: zero_pool, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:26:44.613056Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489914682820216:2519], DatabaseId: Root, PoolId: zero_pool, Start pool creating 2026-02-19T08:26:44.648090Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608489914682820365:2636] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/zero_pool\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:44.648194Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489914682820216:2519], DatabaseId: Root, PoolId: zero_pool, Pool successfully created 2026-02-19T08:26:44.648654Z node 1 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:101: Add database: /Root 2026-02-19T08:26:44.650756Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:304} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY= temp_dir_name# dab34145-44b5-fa9f-aee7-909eef0d6e05 trace_id# 2026-02-19T08:26:44.650871Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:309} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-02-19T08:26:44.651301Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:542} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ReadyState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7608489914682820401:2671] database# Root database_id# /Root pool_id# trace_id# 2026-02-19T08:26:44.651447Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:750} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Sending CompileQuery request trace_id# 2026-02-19T08:26:45.056417Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1820} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136714258237144 trace_id# 2026-02-19T08:26:45.056486Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1991} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Sending to Executer span_id_size# 8 trace_id# 2026-02-19T08:26:45.056757Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2062} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Created new 
KQP executer executer_id# [1:7608489918977788018:2337] is_rollback# false trace_id# 2026-02-19T08:26:45.090919Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2587} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Forwarded TEvStreamData to [1:7608489914682820401:2671] trace_id# 2026-02-19T08:26:45.097130Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2467} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-02-19T08:26:45.097334Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2795} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, TxInfo status# Committed kind# Pure total_duration# 41.397 server_duration# 40.95 queries_count# 2 trace_id# 2026-02-19T08:26:45.097412Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2969} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-02-19T08:26:45.098056Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3378} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-02-19T08:26:45.098117Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3455} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, EndCleanup is_final# true trace_id# 2026-02-19T08:26:45.098187Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3170} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: ExecuteState, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Sent query response back to proxy proxy_request_id# 3 proxy_id# [1:7608489901797917679:2269] trace_id# 2026-02-19T08:26:45.098218Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3469} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: unknown state, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-02-19T08:26:45.098566Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3564} SessionId: ydb://session/3?node_id=1&id=Y2VkNzY5NGMtZTZiYTEyYWQtZTVkNjE2OTgtOTc2NWY4OGY=, ActorId: [1:7608489914682820402:2337], ActorState: unknown state, LegacyTraceId: 01khtg4d1bfxpr0kbg4e451py9, Session actor destroyed trace_id# >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> 
TExternalDataSourceTest::DropExternalDataSource >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:26:06.154701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:06.243869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:06.248712Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:26:06.248962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:26:06.249083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:26:06.550063Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:06.636902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:06.637014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:06.694741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:06.808884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:07.442881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:07.443317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:07.443349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:07.443377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:07.443784Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:07.506998Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:08.024359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:26:10.431323Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:10.437041Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:26:10.439838Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:10.461975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:26:10.462062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:10.510112Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:26:10.511619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:10.675908Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:10.675988Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:10.676994Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.677456Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.677981Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.678481Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.678722Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.678802Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.678941Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.679035Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.679148Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:10.692998Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:10.859342Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:10.902205Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:26:10.902290Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:26:10.935397Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:26:10.935626Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:26:10.935782Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:26:10.935822Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:10.935865Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:26:10.935907Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:10.935947Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:10.935983Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:10.936408Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:10.940794Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:10.940882Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:10.962974Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2180:2627] 2026-02-19T08:26:10.963744Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2180:2627], schemeshard id = 72075186224037897 2026-02-19T08:26:11.023631Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2265:2669] 2026-02-19T08:26:11.025592Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:26:11.035829Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:26:11.035885Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:26:11.035946Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:26:11.041605Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:11.045646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:11.051458Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:26:11.051595Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:26:11.062318Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:26:11.237379Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:26:11.263902Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:26:11.516511Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:26:11.652126Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:11.652195Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:26:12.393407Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 894] Loaded traversal start key 2026-02-19T08:26:44.756605Z node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2026-02-19T08:26:44.756634Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-02-19T08:26:44.756665Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1771489604669508 2026-02-19T08:26:44.756694Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-02-19T08:26:44.756720Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-02-19T08:26:44.756787Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-02-19T08:26:44.756845Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:44.756924Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-02-19T08:26:44.756969Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:44.757023Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:44.757071Z node 2 :STATISTICS DEBUG: tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:44.757174Z node 2 :STATISTICS DEBUG: tx_init.cpp:300: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:44.758108Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:44.758339Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5579:4777] Owner: [2:5578:4776]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:44.758386Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5579:4777] Owner: [2:5578:4776]. 
Column diff is empty, finishing 2026-02-19T08:26:44.758817Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-02-19T08:26:44.758890Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-02-19T08:26:44.759817Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-02-19T08:26:44.759882Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-02-19T08:26:44.761359Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-02-19T08:26:44.779718Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5588:4784] 2026-02-19T08:26:44.779967Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5588:4784], schemeshard id = 72075186224037897 2026-02-19T08:26:44.780068Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5545:4757], server id = [2:5589:4785], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:44.780157Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5589:4785] 2026-02-19T08:26:44.780241Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5589:4785], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-02-19T08:26:44.830488Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-02-19T08:26:44.830664Z node 2 :STATISTICS DEBUG: service_impl.cpp:587: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-02-19T08:26:44.831843Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5594:4789], server id = [2:5598:4793], tablet id = 72075186224037899, status = OK 2026-02-19T08:26:44.832268Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5594:4789], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:44.832574Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5595:4790], server id = [2:5599:4794], tablet id = 72075186224037900, status = OK 2026-02-19T08:26:44.832633Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5595:4790], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:44.834580Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5596:4791], server id = [2:5601:4796], tablet id = 72075186224037901, status = OK 2026-02-19T08:26:44.834647Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5596:4791], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:44.834826Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:5597:4792], server id = [2:5600:4795], tablet id = 72075186224037902, status = OK 2026-02-19T08:26:44.834877Z node 2 :STATISTICS DEBUG: service_impl.cpp:992: TEvStatisticsRequest send, client id = [2:5597:4792], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-02-19T08:26:44.841541Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037899 
2026-02-19T08:26:44.842275Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5594:4789], server id = [2:5598:4793], tablet id = 72075186224037899 2026-02-19T08:26:44.842329Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:44.842651Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-02-19T08:26:44.843443Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5595:4790], server id = [2:5599:4794], tablet id = 72075186224037900 2026-02-19T08:26:44.843477Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:44.844553Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-02-19T08:26:44.844955Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5597:4792], server id = [2:5600:4795], tablet id = 72075186224037902 2026-02-19T08:26:44.844988Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:44.845188Z node 2 :STATISTICS DEBUG: service_impl.cpp:316: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-02-19T08:26:44.845243Z node 2 :STATISTICS DEBUG: service_impl.cpp:501: Send aggregate statistics response to node: 2 2026-02-19T08:26:44.845611Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-02-19T08:26:44.845780Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-02-19T08:26:44.846096Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5614:4805], ActorId: [2:5615:4806], Starting query actor #1 [2:5616:4807] 2026-02-19T08:26:44.846168Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5615:4806], ActorId: [2:5616:4807], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:26:44.849814Z node 2 :STATISTICS DEBUG: service_impl.cpp:1058: EvClientDestroyed, node id = 2, client id = [2:5596:4791], server id = [2:5601:4796], tablet id = 72075186224037901 2026-02-19T08:26:44.849854Z node 2 :STATISTICS DEBUG: service_impl.cpp:1076: Skip EvClientDestroyed 2026-02-19T08:26:44.850771Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5615:4806], ActorId: [2:5616:4807], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NGMxOGJmYjItMzQ2OWRhYTMtZWRhMmIyNGItMjdmNDk0ZTY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:26:44.973843Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5625:4816]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:44.974211Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:26:44.974270Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5625:4816], StatRequests.size() = 1 2026-02-19T08:26:45.149169Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5615:4806], ActorId: [2:5616:4807], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGMxOGJmYjItMzQ2OWRhYTMtZWRhMmIyNGItMjdmNDk0ZTY=, TxId: 2026-02-19T08:26:45.149266Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5615:4806], ActorId: [2:5616:4807], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGMxOGJmYjItMzQ2OWRhYTMtZWRhMmIyNGItMjdmNDk0ZTY=, TxId: 2026-02-19T08:26:45.149865Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5614:4805], ActorId: [2:5615:4806], Got response [2:5616:4807] SUCCESS 2026-02-19T08:26:45.150317Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:26:45.187253Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:26:45.187331Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:48: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-02-19T08:26:45.258183Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5653:4824]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:45.258722Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:45.258810Z node 2 :STATISTICS DEBUG: service_impl.cpp:746: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-02-19T08:26:45.259181Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:26:45.259231Z node 2 :STATISTICS DEBUG: service_impl.cpp:653: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ] 2026-02-19T08:26:45.259290Z node 2 :STATISTICS DEBUG: database.cpp:224: [DispatchLoadStatisticsQuery] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-02-19T08:26:45.262890Z node 2 :STATISTICS DEBUG: service_impl.cpp:1089: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchPEA::ExecuteState_ChildExecuterInternalError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:26:22.221539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:22.314882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:22.334512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:26:22.334580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:26:22.336685Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:26:22.572865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:22.573002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:22.636859Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489579893174 != 1771489579893178 2026-02-19T08:26:22.643868Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:22.687591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:22.816553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:23.124857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:23.124950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:23.125015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:23.125487Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:23.137966Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:24.412931Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:24.413067Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:24.413326Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 4 Stages# 1 trace_id# 2026-02-19T08:26:24.413407Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:24.413674Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715661. Resolved key sets: 1 2026-02-19T08:26:24.413896Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715661. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:24.414097Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:3030: Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 '('"Value" (String '"Value0"))) (return (Iterator (AsList (AsStruct '('"Key" (Uint32 '0)) $1) (AsStruct '('"Key" (Uint32 '1)) $1) (AsStruct '('"Key" (Uint32 '2)) $1) (AsStruct '('"Key" (Uint32 '3)) $1) (AsStruct '('"Key" (Uint32 '4)) $1)))) )))) ) 2026-02-19T08:26:24.414168Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:2018: Stage [0,0] create compute task: 1 2026-02-19T08:26:24.414420Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:26:24.414902Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1225:2780] 2026-02-19T08:26:24.414970Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1225:2780], channels: 0 2026-02-19T08:26:24.415087Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:26:24.415147Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2752} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-02-19T08:26:24.415244Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1225:2780] 2026-02-19T08:26:24.415299Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715661. 
Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1225:2780], channels: 0 2026-02-19T08:26:24.415386Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1225:2780], trace_id# 2026-02-19T08:26:24.415453Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1225:2780], trace_id# 2026-02-19T08:26:24.415521Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:26:24.427906Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1225:2780] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:26:24.428035Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:1225:2780], trace_id# 2026-02-19T08:26:24.428110Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1225:2780], trace_id# 2026-02-19T08:26:24.429277Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1225:2780] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1743 Tasks { TaskId: 1 CpuTimeUs: 1022 FinishTimeMs: 1771489584428 EgressBytes: 70 EgressRows: 5 ComputeCpuTimeUs: 49 BuildCpuTimeUs: 973 HostName: "ghrun-fkrokuvqke" NodeId: 1 CreateTimeMs: 1771489584415 UpdateTimeMs: 1771489584428 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:24.429351Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNzEyZDktZjQxN2RhZDItYmYzZGM3NTctNTIyODdlZTU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1225:2780] 2026-02-19T08:26:24.429433Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [1:1222:2780] TxId: 281474976715661. Ctx: { TraceId: 01khtg3s46dkafmygdhq86gz07, Database: /Root, SessionId: ydb ... me ZombieState trace_id# 2026-02-19T08:26:45.721241Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:219} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Got tx response. Sender# [2:1323:3054] PartitionIndex# 3 Status# INTERNAL_ERROR 2026-02-19T08:26:45.721338Z node 2 :KQP_EXECUTER ERROR: {KQPPEA@kqp_partitioned_executer.cpp:252} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 4, Message: Executer unretriable error. PartitionIndex# 3 Status# INTERNAL_ERROR Issues# {
: Fatal: Test internal error from child executer } 2026-02-19T08:26:45.721409Z node 2 :KQP_EXECUTER ERROR: {KQPPEA@kqp_partitioned_executer.cpp:853} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: First error occurred. Status# INTERNAL_ERROR Issues# {
: Fatal: Test internal error from child executer } 2026-02-19T08:26:45.721486Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:630} ActorId: [2:1314:3054], ActorState: ExecuteState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Entering AbortState, trying to finish execution. ActivePartitionsCount# 3 ReturnStatus# INTERNAL_ERROR 2026-02-19T08:26:45.722114Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1319:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:45.722181Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1319:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:45.722308Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1319:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1314:3054] trace_id# 2026-02-19T08:26:45.722405Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1319:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:45.722456Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1319:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:45.722579Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:45.722626Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:45.722689Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1314:3054] trace_id# 2026-02-19T08:26:45.722734Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:45.722758Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1321:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:45.722822Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1081} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Got EvAbortExecution Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:45.722856Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } trace_id# 2026-02-19T08:26:45.722909Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1360} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: ABORTED Issues { message: "Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ]" severity: 1 } Result { Stats { } } TargetActor# [2:1314:3054] trace_id# 2026-02-19T08:26:45.722954Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:45.722996Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1317:3054] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:45.723129Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 3, Message: Got tx response. Sender# [2:1319:3054] PartitionIndex# 1 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:26:45.723203Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 2 TotalPartitions# 4 2026-02-19T08:26:45.723308Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 2, Message: Got tx response. Sender# [2:1321:3054] PartitionIndex# 2 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:26:45.723338Z node 2 :KQP_EXECUTER DEBUG: {KQPPEA@kqp_partitioned_executer.cpp:839} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Not all partitions have been processed, cannot finish execution. RemainingPartitionsCount# 1 TotalPartitions# 4 2026-02-19T08:26:45.723436Z node 2 :KQP_EXECUTER TRACE: {KQPPEA@kqp_partitioned_executer.cpp:364} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 1, Message: Got tx response. Sender# [2:1317:3054] PartitionIndex# 0 Status# ABORTED Issues# {
: Error: Aborted by PEA: [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] } 2026-02-19T08:26:45.723528Z node 2 :KQP_EXECUTER INFO: {KQPPEA@kqp_partitioned_executer.cpp:825} ActorId: [2:1314:3054], ActorState: AbortState, Operation: BATCH UPDATE, ActivePartitions: 0, Message: All partitions processed, finish execution. Status# INTERNAL_ERROR Issues# [ {
: Fatal: Test internal error from child executer } {
: Error: while executing BATCH UPDATE query } ] 2026-02-19T08:26:45.731461Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:45.731983Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [2:1335:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [2:1334:3054] trace_id# 2026-02-19T08:26:45.732219Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1335:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:45.732257Z node 2 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [2:1335:3054] TxId: 281474976715669. Ctx: { TraceId: 01khtg4dyc7bndjrsmjp7vp10y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjcyOTM4NjEtYmVhNjIxZmEtZTZjYjViMWUtNTI2MGNlOGY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalDataSourceTest::CreateExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:26:44.646310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:26:44.646400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:44.646460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:26:44.646491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:26:44.646528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:26:44.646561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:26:44.646593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:26:44.646622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:26:44.646691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:44.646776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:26:44.647586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:44.647865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:26:44.778939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:26:44.779027Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:44.779869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:44.791834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:26:44.791958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:26:44.792176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:26:44.803553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:26:44.804197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:26:44.804898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:26:44.805248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:26:44.810114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:44.810338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:26:44.812176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:26:44.812241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:44.812494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:26:44.812544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a 
serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:26:44.812617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:26:44.812967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:26:44.974986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.975984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers 
} } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.976879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:44.977013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
fCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:26:47.170874Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:47.170941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:47.170970Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:26:47.170995Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:26:47.171021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:26:47.171810Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:47.171861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:47.171878Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:26:47.171896Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:26:47.171919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:26:47.171985Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:26:47.173649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:26:47.174103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:26:47.174749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:26:47.174918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:26:47.174947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:26:47.175209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:26:47.175297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:26:47.175324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:743:2732] TestWaitNotification: OK eventTxId 101 2026-02-19T08:26:47.175626Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:26:47.175761Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 153us result status StatusSuccess 2026-02-19T08:26:47.175976Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-02-19T08:26:47.177660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2026-02-19T08:26:47.177782Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_external_data_source.cpp:116: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2026-02-19T08:26:47.177858Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2026-02-19T08:26:47.179472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:26:47.179662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:26:47.179862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:26:47.179890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:26:47.180148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:26:47.180208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:26:47.180237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:751:2740] TestWaitNotification: OK eventTxId 103 2026-02-19T08:26:47.180521Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:26:47.180695Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 178us result status StatusSuccess 2026-02-19T08:26:47.180940Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { References { Path: "/MyRoot/ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Initializer::Simple [GOOD] >> TExternalDataSourceTest::DropExternalDataSource [GOOD] >> TExternalDataSourceTest::SchemeErrors >> TGRpcStreamingTest::WritesDoneFromClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:26:46.983645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:26:46.983713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:46.983760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:26:46.983788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:26:46.983817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:26:46.983845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:26:46.983869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:26:46.983907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:26:46.983987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:46.984055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:26:46.984870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:46.985114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:26:47.057379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:26:47.057433Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:47.066592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:26:47.068885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:26:47.069053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:26:47.079427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:26:47.080036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:26:47.080691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:26:47.080935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:26:47.083747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:47.083936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:26:47.084924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:26:47.084970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:47.085047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:26:47.085080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:26:47.085120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:26:47.085290Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:26:47.212057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.213991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.214081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.214153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.214226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.214317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
Id: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:26:47.832104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:26:47.832227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:26:47.832267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:26:47.832311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:26:47.832345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:26:47.832381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:26:47.832427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:26:47.832467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:26:47.832509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:26:47.832597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:26:47.832638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:26:47.832675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-02-19T08:26:47.832706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-02-19T08:26:47.833425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:47.833526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:47.833583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:26:47.833638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-02-19T08:26:47.833688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:26:47.834438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 
2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:47.834528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:47.834563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:26:47.834593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:26:47.834630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:26:47.834716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:26:47.838095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:26:47.838231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:26:47.838439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:26:47.838484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:26:47.838903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:26:47.838998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:26:47.839046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:701:2689] TestWaitNotification: OK eventTxId 101 2026-02-19T08:26:47.839473Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:26:47.839708Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 242us result status StatusSuccess 2026-02-19T08:26:47.840093Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-02-19T08:26:47.843341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:26:47.843643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2026-02-19T08:26:47.843752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_data_source.cpp:202: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2026-02-19T08:26:47.843903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2026-02-19T08:26:47.846232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-02-19T08:26:47.846492Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:26:47.846792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:26:47.846837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:26:47.847180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:26:47.847273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:26:47.847316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:709:2697] TestWaitNotification: OK eventTxId 102 >> TGRpcStreamingTest::ClientDisconnects >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:26:46.141212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:26:46.141327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:46.141413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:26:46.141459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:26:46.141501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:26:46.141542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:26:46.141589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:26:46.141626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: 
ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:26:46.141737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:46.141821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:26:46.142800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:46.143143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:26:46.288534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:26:46.288638Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:46.289595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:46.302828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:26:46.302973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:26:46.303188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:26:46.317199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:26:46.318159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:26:46.318979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:26:46.319393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:26:46.327293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:46.327546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:26:46.329668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:26:46.329785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:46.330071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:26:46.330119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:26:46.330163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-02-19T08:26:46.330494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:26:46.463286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:46.464769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
bletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-02-19T08:26:48.182836Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:26:48.182946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 8589936753 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:26:48.182999Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000039 2026-02-19T08:26:48.183099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:26:48.183183Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> Done 2026-02-19T08:26:48.183349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:26:48.183417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:26:48.184252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:26:48.184680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:26:48.186053Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:26:48.186089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:26:48.186231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:26:48.186386Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:48.186428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:215:2214], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:26:48.186463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:215:2214], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:26:48.186676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:26:48.186714Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:26:48.186804Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:26:48.186838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:26:48.186875Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:26:48.186908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:26:48.186945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:26:48.186987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:26:48.187019Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:26:48.187051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:26:48.187119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:26:48.187155Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:26:48.187205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:26:48.187239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:26:48.187678Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:26:48.187751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:26:48.187786Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:26:48.187826Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:26:48.187868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:26:48.188124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:26:48.188168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:26:48.188227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:26:48.188422Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:26:48.188479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:26:48.188504Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:26:48.188530Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:26:48.188555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:26:48.188628Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:26:48.191510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:26:48.191997Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:26:48.192076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:26:48.192262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:26:48.192298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:26:48.192633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:26:48.192720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.192757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:738:2726] TestWaitNotification: OK eventTxId 102 2026-02-19T08:26:48.193159Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:26:48.193322Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 193us result status StatusPathDoesNotExist 2026-02-19T08:26:48.193496Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:40.567131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:40.656033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:40.673851Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:40.673924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:40.674365Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:40.907590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:40.907705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:40.983050Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489538330360 != 1771489538330364 2026-02-19T08:25:40.995016Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:41.041163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:41.140619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:41.591110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:41.591171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:41.591198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:41.593462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:41.594179Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:41.608445Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:51.742184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:909:2774], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.742308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:919:2779], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.742374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.743136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.743327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:25:51.747601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:25:51.826413Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:924:2783], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-02-19T08:25:51.884301Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:980:2820] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:25:52.087059Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:990:2829], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:25:52.089606Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ODMxNzI1ZjMtZTA4NWU0NTktZTk5ZmM0YzYtZTQ5Nzc4ZjI=, ActorId: [1:905:2771], ActorState: ExecuteState, LegacyTraceId: 01khtg2sbtckgeafwky5np3rja, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/test]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2026-02-19T08:25:52.158471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:52.992452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:25:53.348235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:54.029286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Initialization finished REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2026-02-19T08:26:15.569697Z node 1 :TX_PROXY ERROR: schemereq.cpp:1238: Actor# [1:1523:3225] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2026-02-19T08:26:15.569846Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1523:3225] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:946 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2026-02-19T08:26:47.339207Z node 1 :TX_PROXY ERROR: schemereq.cpp:1238: Actor# [1:1680:3340] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2026-02-19T08:26:47.339443Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1680:3340] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 >> TExternalDataSourceTest::SchemeErrors [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:26:46.691270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:26:46.691374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:46.691444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:26:46.691484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:26:46.691528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:26:46.691567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:26:46.691599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:26:46.691631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:26:46.691721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:46.691795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:26:46.692718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:46.693044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:26:46.830539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:26:46.830628Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table 
profiles were not loaded 2026-02-19T08:26:46.831495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:46.844124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:26:46.844239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:26:46.844487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:26:46.857282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:26:46.858007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:26:46.858810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:26:46.859195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:26:46.864314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:46.864536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:26:46.866589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:26:46.866679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:46.866917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:26:46.866972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:26:46.867022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:26:46.867394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:26:47.021906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.022953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:26:47.023067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.023949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:47.024020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
shard: 72057594046678944 2026-02-19T08:26:48.945085Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-02-19T08:26:48.945128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.945150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.945230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.945250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.945335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-02-19T08:26:48.945381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-02-19T08:26:48.945465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.945492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.945627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-02-19T08:26:48.945686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.945737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.945876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-02-19T08:26:48.945920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.945941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.945984Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-02-19T08:26:48.946119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.946146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.946206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-02-19T08:26:48.946255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.946277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.946348Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-02-19T08:26:48.946425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.946447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.946549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2026-02-19T08:26:48.946607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.946628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.946726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.946750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.946816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-02-19T08:26:48.946895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.946915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.946998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.947017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.947088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.947110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.947163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-02-19T08:26:48.947233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-02-19T08:26:48.947295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.947317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.947443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.947467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.947558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 
2026-02-19T08:26:48.947591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.947729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.947755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.947895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.947918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [2:801:2790] 2026-02-19T08:26:48.948016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-02-19T08:26:48.948040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [2:801:2790] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 2026-02-19T08:26:48.951281Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:26:48.951476Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 229us result status StatusSuccess 2026-02-19T08:26:48.951845Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/other_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TGRpcStreamingTest::ClientNeverWrites ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2026-02-19T08:26:48.748900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:26:48.748982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:48.749068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:26:48.749106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:26:48.749141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:26:48.749175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:26:48.749203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:26:48.749234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:26:48.749289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:48.749352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:26:48.750210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:48.750509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:26:48.877489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:26:48.877579Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:48.878427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:48.890203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:26:48.890443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:26:48.890620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:26:48.902670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:26:48.903041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:26:48.903787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:26:48.904444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:26:48.907397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:48.907577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:26:48.908753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:26:48.908812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:48.908919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:26:48.908968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:26:48.909007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:26:48.909272Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:26:49.050402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:49.051820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
55Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2026-02-19T08:26:49.566733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2026-02-19T08:26:49.566929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2026-02-19T08:26:49.569273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:26:49.569565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-02-19T08:26:49.571782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:26:49.572104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2026-02-19T08:26:49.572191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2026-02-19T08:26:49.572395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2026-02-19T08:26:49.574732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:26:49.574980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2026-02-19T08:26:49.577250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:26:49.577574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2026-02-19T08:26:49.577757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2026-02-19T08:26:49.577924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2026-02-19T08:26:49.580151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:26:49.580438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, 
subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2026-02-19T08:26:49.582764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:26:49.583009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2026-02-19T08:26:49.583117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2026-02-19T08:26:49.583218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-02-19T08:26:49.585448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:26:49.585698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 >> TGRpcStreamingTest::ReadFinish >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_40_Query [GOOD] Test command err: 2026-02-19T08:22:40.838387Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488868901732307:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.838461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:40.873646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:40.877666Z node 1 
:PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:22:41.174855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.174941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.226070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.233915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.275981Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:41.281894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488868901732074:2081] 1771489360801253 != 1771489360801256 2026-02-19T08:22:41.387490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f3d/r3tmp/yandexCxceXV.tmp 2026-02-19T08:22:41.387518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f3d/r3tmp/yandexCxceXV.tmp 2026-02-19T08:22:41.388145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f3d/r3tmp/yandexCxceXV.tmp 2026-02-19T08:22:41.388253Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:41.430736Z INFO: TTestServer started on Port 3237 GrpcPort 5729 2026-02-19T08:22:41.518507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:5729 2026-02-19T08:22:41.670023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.754199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:41.837986Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:44.057244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488886081602294:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.057445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.057925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488886081602307:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.058002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488886081602308:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.058137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:44.069922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:44.088012Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488886081602311:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:22:44.147799Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488886081602375:2644] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:44.449612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.456949Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488886081602383:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:44.458184Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NDJlNDg5MGQtODZmNjc5NjAtNDFlNDcyYTktZTQ0Mjk5MDk=, ActorId: [1:7608488886081602292:2335], ActorState: ExecuteState, LegacyTraceId: 01khtfx22qf7da1v2vz9vhjq8g, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:44.460605Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:22:44.490032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.573559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608488886081602657:2820] 2026-02-19T08:22:45.843485Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488868901732307:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.843566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2026-02-19T08:22:50.052192Z :WriteToTopic_Demo_22_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-02-19T08:22:50.083991Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:22:50.117271Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608488911851406619:2910] connected; active server actors: 1 2026-02-19T08:22:50.118110Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-02-19T08:22:50.132783Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-02-19T08:22:50.132961Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-02-19T08:22:50.136110Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:50.140075Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:22:50.140360Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-02-19T08:22:50.140391Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:22:5 ... :26:46.095227Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:46.095265Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.095280Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:46.095300Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.095315Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:46.193495Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:46.193539Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.193577Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:46.193600Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.193617Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:46.195990Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:46.196031Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.196047Z 
node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:46.196068Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.196085Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:46.231611Z node 15 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:26:46.294271Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:46.294347Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.294379Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:46.294425Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.294477Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:46.298263Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:46.298335Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.298360Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:46.298399Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.298430Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:46.326908Z :INFO: [/Root] [/Root] [b819cf6d-170191ec-4b9e88b5-8dc150df] Closing read session. Close timeout: 0.000000s 2026-02-19T08:26:46.326984Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:99:100 2026-02-19T08:26:46.327054Z :INFO: [/Root] [/Root] [b819cf6d-170191ec-4b9e88b5-8dc150df] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2069 BytesRead: 100000000 MessagesRead: 100 BytesReadCompressed: 100000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:26:46.327182Z :NOTICE: [/Root] [/Root] [b819cf6d-170191ec-4b9e88b5-8dc150df] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:26:46.327234Z :DEBUG: [/Root] [/Root] [b819cf6d-170191ec-4b9e88b5-8dc150df] [] Abort session to cluster 2026-02-19T08:26:46.327790Z :NOTICE: [/Root] [/Root] [b819cf6d-170191ec-4b9e88b5-8dc150df] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:26:46.329740Z node 15 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_15_1_11645093351001210775_v1 grpc read done: success# 0, data# { } 2026-02-19T08:26:46.329784Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_15_1_11645093351001210775_v1 grpc read failed 2026-02-19T08:26:46.329822Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_15_1_11645093351001210775_v1 grpc closed 2026-02-19T08:26:46.329873Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_15_1_11645093351001210775_v1 is DEAD 2026-02-19T08:26:46.330273Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037894] Destroy direct read session test-consumer_15_1_11645093351001210775_v1 2026-02-19T08:26:46.330347Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608489914747310812:2543] destroyed 2026-02-19T08:26:46.330396Z node 15 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_15_1_11645093351001210775_v1 2026-02-19T08:26:46.330927Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-02-19T08:26:46.330973Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0] PartitionId [0] Generation [1] Write session will now close 2026-02-19T08:26:46.331023Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0] PartitionId [0] Generation [1] Write session: aborting 2026-02-19T08:26:46.331171Z node 15 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [15:7608489914747310809:2540] disconnected. 
2026-02-19T08:26:46.331202Z node 15 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [15:7608489914747310809:2540] disconnected; active server actors: 1 2026-02-19T08:26:46.331217Z node 15 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [15:7608489914747310809:2540] client test-consumer disconnected session test-consumer_15_1_11645093351001210775_v1 2026-02-19T08:26:46.331386Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:26:46.331427Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0] PartitionId [0] Generation [1] Write session: destroy 2026-02-19T08:26:46.332224Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0 grpc read done: success: 0 data: 2026-02-19T08:26:46.332250Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0 grpc read failed 2026-02-19T08:26:46.332278Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0 grpc closed 2026-02-19T08:26:46.332291Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: test-message_group_id|8f7c4901-fb0eaa88-5639aed1-5f1ee95e_0 is DEAD 2026-02-19T08:26:46.333174Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:46.333213Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:46.333570Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608489901862408595:2487] destroyed 2026-02-19T08:26:46.333596Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608489901862408598:2487] destroyed 2026-02-19T08:26:46.333623Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:26:46.333643Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:46.333654Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.333662Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:46.333676Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.333685Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:46.395368Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:46.395412Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.395429Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:46.395452Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.395471Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:46.398310Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:46.398354Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.398371Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:46.398392Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:46.398408Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2026-02-19T08:26:34.583519Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:34.583669Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:34.602929Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:34.603034Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:34.617482Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:34.625431Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:135:2159], cookie=8407113489879779009, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2026-02-19T08:26:34.626010Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:34.648861Z node 1 :KESUS_TABLET DEBUG: 
tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:135:2159], cookie=8407113489879779009) 2026-02-19T08:26:34.649649Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:145:2167], cookie=2557865281639649139, path="/Root/Res", config={ }) 2026-02-19T08:26:34.649943Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-02-19T08:26:34.662143Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:145:2167], cookie=2557865281639649139) 2026-02-19T08:26:34.666403Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:150:2172]. Cookie: 13539678555079267417. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:34.666504Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[1:150:2172], cookie=13539678555079267417) 2026-02-19T08:26:34.667132Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [1:150:2172]. Cookie: 6870839877824073538. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2026-02-19T08:26:34.667187Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[1:150:2172], cookie=6870839877824073538) 2026-02-19T08:26:36.854410Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:36.854533Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:36.869528Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:36.869659Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:36.904484Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:36.904945Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:134:2159], cookie=5387958123829477203, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-02-19T08:26:36.905241Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:36.917214Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:134:2159], cookie=5387958123829477203) 2026-02-19T08:26:36.917800Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:145:2167], cookie=4481294369986118212, path="/Root/Res", config={ }) 2026-02-19T08:26:36.917978Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-02-19T08:26:36.930008Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:145:2167], cookie=4481294369986118212) 2026-02-19T08:26:36.930696Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:150:2172]. Cookie: 8653160671890741778. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:36.930736Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:150:2172], cookie=8653160671890741778) 2026-02-19T08:26:36.931206Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:150:2172]. Cookie: 907296010307576399. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2026-02-19T08:26:36.931259Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:150:2172], cookie=907296010307576399) 2026-02-19T08:26:39.122031Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:39.122147Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:39.140973Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:39.141088Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:39.166571Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:39.167061Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:137:2161], cookie=5681663631965016033, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-02-19T08:26:39.167398Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:39.179630Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:137:2161], cookie=5681663631965016033) 2026-02-19T08:26:39.180224Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=17809002265617513138, path="/Root/Res1", config={ }) 2026-02-19T08:26:39.180465Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2026-02-19T08:26:39.192433Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=17809002265617513138) 2026-02-19T08:26:39.193154Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:152:2174], cookie=4582828159452090094, path="/Root/Res2", config={ }) 2026-02-19T08:26:39.193322Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2026-02-19T08:26:39.205417Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:152:2174], cookie=4582828159452090094) 2026-02-19T08:26:39.206255Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 5613562293372994792. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:39.206333Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=5613562293372994792) 2026-02-19T08:26:39.207012Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 3513355837295831971. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:39.207081Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=3513355837295831971) 2026-02-19T08:26:39.207694Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:157:2179]. Cookie: 17820522744713691233. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2026-02-19T08:26:39.207759Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:157:2179], cookie=17820522744713691233) 2026-02-19T08:26:41.374668Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:41.374749Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:41.388808Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:41.388969Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:41.414319Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:41.414736Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=15637425496219849469, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-02-19T08:26:41.414981Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:41.426900Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=15637425496219849469) 2026-02-19T08:26:41.427560Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 95877165517447301. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:41.427614Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=95877165517447301) 2026-02-19T08:26:41.428035Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 7149487575419883180. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-02-19T08:26:41.428069Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=7149487575419883180) 2026-02-19T08:26:41.428426Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 10162003870356485179. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-02-19T08:26:41.428462Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=10162003870356485179) 2026-02-19T08:26:43.653115Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:43.653230Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:43.674311Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:43.674957Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:43.710326Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:43.710836Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=10866680737579805781, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-02-19T08:26:43.711172Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:26:43.723407Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=10866680737579805781) 2026-02-19T08:26:43.724246Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:147:2169]. Cookie: 6967286663478573974. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:43.724315Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:147:2169], cookie=6967286663478573974) 2026-02-19T08:26:43.724794Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:147:2169]. Cookie: 12869722474457817085. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2026-02-19T08:26:43.724847Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:147:2169], cookie=12869722474457817085) 2026-02-19T08:26:46.233824Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:186:2193]. Cookie: 1895165765287643028. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:46.233896Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:186:2193], cookie=1895165765287643028) 2026-02-19T08:26:46.234406Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:186:2193]. Cookie: 4992220995943683533. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2026-02-19T08:26:46.234450Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:186:2193], cookie=4992220995943683533) 2026-02-19T08:26:48.386910Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:219:2219]. Cookie: 18104362171333945659. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-02-19T08:26:48.386977Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:219:2219], cookie=18104362171333945659) 2026-02-19T08:26:48.387449Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:219:2219]. Cookie: 5932277646865719260. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2026-02-19T08:26:48.387490Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:219:2219], cookie=5932277646865719260) >> KqpLimits::QSReplySize+useSink [GOOD] >> KqpLimits::QSReplySize-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:26:48.365113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:26:48.365197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:48.365255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:26:48.365289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:26:48.365324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:26:48.365355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:26:48.365383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:26:48.365407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:26:48.365490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:26:48.365597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:26:48.366438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:48.366712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:26:48.477553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:26:48.477613Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 
2026-02-19T08:26:48.478231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:26:48.489452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:26:48.489561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:26:48.489741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:26:48.500406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:26:48.501103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:26:48.501846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:26:48.502154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:26:48.506534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:48.506725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:26:48.508438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:26:48.508499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:26:48.508694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:26:48.508740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:26:48.508782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:26:48.509070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:26:48.663367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664424Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.664977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.665041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.665096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.665176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:26:48.665293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
MESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:26:50.413591Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:26:50.413653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:26:50.414358Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:50.414431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:26:50.414462Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:26:50.414489Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:26:50.414519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:26:50.414602Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:26:50.417871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:26:50.418182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:26:50.418384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:26:50.418425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:26:50.418796Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:26:50.418887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:26:50.418922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:711:2700] TestWaitNotification: OK eventTxId 101 2026-02-19T08:26:50.419348Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:26:50.419541Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 233us result status StatusSuccess 2026-02-19T08:26:50.419854Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-02-19T08:26:50.422104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:26:50.422385Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2026-02-19T08:26:50.422462Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2026-02-19T08:26:50.422626Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:26:50.425008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-02-19T08:26:50.425288Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:26:50.425591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:26:50.425635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:26:50.426045Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:26:50.426158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:26:50.426197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:719:2708] TestWaitNotification: OK eventTxId 102 2026-02-19T08:26:50.426619Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:26:50.426849Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 252us result status StatusSuccess 2026-02-19T08:26:50.427211Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> KqpBatchDelete::ManyPartitions_3 [GOOD] >> KqpBatchDelete::MultiStatement >> TxUsage::Write_And_Read_Small_Messages_1 [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] |94.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TGRpcStreamingTest::SimpleEcho >> KqpBatchDelete::HasTxControl [GOOD] >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied |94.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2026-02-19T08:26:52.121671Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:178: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2026-02-19T08:26:48.647890Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489931588105243:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:48.647957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:48.888527Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:48.916991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:48.917087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:48.966332Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:48.974189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:48.974578Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489931588105201:2081] 1771489608645684 != 1771489608645687 2026-02-19T08:26:49.047078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:49.066674Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d3abc98fe80] stream accepted Name# Session ok# true peer# ipv6:[::1]:39134 2026-02-19T08:26:49.067210Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d3abc98fe80] facade attach Name# Session actor# [1:7608489935883073268:2462] peer# ipv6:[::1]:39134 2026-02-19T08:26:49.067244Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d3abc98fe80] facade read Name# Session peer# ipv6:[::1]:39134 2026-02-19T08:26:49.067364Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d3abc98fe80] read finished Name# Session ok# false data# peer# ipv6:[::1]:39134 2026-02-19T08:26:49.067549Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:302: Received TEvReadFinished, success = 0 2026-02-19T08:26:49.067583Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d3abc98fe80] facade finish Name# Session peer# ipv6:[::1]:39134 grpc status# (9) message# Everything is A-OK 2026-02-19T08:26:49.067910Z 
node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d3abc98fe80] stream done notification Name# Session ok# true peer# ipv6:[::1]:39134 2026-02-19T08:26:49.067961Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d3abc98fe80] stream finished Name# Session ok# true peer# ipv6:[::1]:39134 grpc status# (9) message# Everything is A-OK 2026-02-19T08:26:49.067986Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d3abc98fe80] deregistering request Name# Session peer# ipv6:[::1]:39134 (finish done) 2026-02-19T08:26:49.068467Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:312: Received TEvNotifiedWhenDone >> TxUsage::Write_And_Read_Small_Messages_2 >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2026-02-19T08:26:52.497342Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:178: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. >> TDatabaseResolverTests::Ydb_Serverless >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireWaiterDowngrade ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2026-02-19T08:26:49.179849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489936742451571:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:49.183124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:49.383356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:49.407040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:49.407125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:49.476708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:49.505814Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:49.507266Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489936742451529:2081] 1771489609177270 != 1771489609177273 2026-02-19T08:26:49.563579Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d9e79080280] stream accepted Name# Session ok# true peer# ipv6:[::1]:41884 2026-02-19T08:26:49.563999Z node 
1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d9e79080280] facade attach Name# Session actor# [1:7608489936742452242:2424] peer# ipv6:[::1]:41884 2026-02-19T08:26:49.564783Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d9e79080280] stream done notification Name# Session ok# true peer# ipv6:[::1]:41884 2026-02-19T08:26:49.564818Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:230: Received TEvNotifiedWhenDone 2026-02-19T08:26:49.566445Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d9e79080280] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2026-02-19T08:26:49.566486Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d9e79080280] deregistering request Name# Session peer# unknown (finish done) 2026-02-19T08:26:49.662364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::ClickHouseNative >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2026-02-19T08:26:53.146100Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:178: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2026-02-19T08:26:53.513436Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:178: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. 
|94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> BasicUsage::AutoPartitioning_KeyedWriteSession [GOOD] >> BasicUsage::AutoPartitioning_KeyedWriteSession_SmallMessages ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] Test command err: 2026-02-19T08:26:21.139395Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489818756450800:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:21.140255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:21.311522Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:21.333307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:21.333372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:21.377879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:21.381235Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:21.382450Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489818756450770:2081] 1771489581137684 != 1771489581137687 2026-02-19T08:26:21.459818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:21.459839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:21.459851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:21.459972Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:21.475233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:23.893871Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006a7/r3tmp/spilling-tmp-runner/node_1_26af6589-34d33881-30927a6c-74fc7a71, actor: [1:7608489827346387198:2384] 2026-02-19T08:26:23.894057Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006a7/r3tmp/spilling-tmp-runner 2026-02-19T08:26:26.144913Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.144967Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.144986Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 
2026-02-19T08:26:26.145001Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.145017Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.145050Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.145086Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.145109Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.145140Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.145175Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.145200Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:26.145252Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3ses0b90p7j492vvtx92, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBkNjk2OTEtM2ZiYWJhYjgtN2FkYmVkYjctNjQ0ODQ4YzY=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:26.145418Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489840231290168:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg3ses0b90p7j492vvtx92, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBkNjk2OTEtM2ZiYWJhYjgtN2FkYmVkYjctNjQ0ODQ4YzY=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:26:26.145449Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489840231290168:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg3ses0b90p7j492vvtx92, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBkNjk2OTEtM2ZiYWJhYjgtN2FkYmVkYjctNjQ0ODQ4YzY=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:26.145828Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:26:26.146039Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146112Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146189Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146251Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146311Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146375Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146438Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146507Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146597Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:26.146654Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... -02-19T08:26:51.107951Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489947805811167:2531] 2026-02-19T08:26:51.107993Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489947805811168:2532], CA [4:7608489947805811169:2533], CA [4:7608489947805811170:2534], CA [4:7608489947805811171:2535], trace_id# 2026-02-19T08:26:51.108032Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. 
Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [4:7608489947805811168:2532], CA [4:7608489947805811169:2533], CA [4:7608489947805811170:2534], CA [4:7608489947805811171:2535], trace_id# 2026-02-19T08:26:51.108129Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489947805811169:2533] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 697 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 400 FinishTimeMs: 1771489611105 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 13 BuildCpuTimeUs: 387 HostName: "ghrun-fkrokuvqke" NodeId: 4 CreateTimeMs: 1771489611102 UpdateTimeMs: 1771489611105 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:51.108151Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489947805811169:2533] 2026-02-19T08:26:51.108185Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489947805811168:2532], CA [4:7608489947805811170:2534], CA [4:7608489947805811171:2535], trace_id# 2026-02-19T08:26:51.108215Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [4:7608489947805811168:2532], CA [4:7608489947805811170:2534], CA [4:7608489947805811171:2535], trace_id# 2026-02-19T08:26:51.108516Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489947805811168:2532] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1168 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 821 FinishTimeMs: 1771489611105 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 22 BuildCpuTimeUs: 799 HostName: "ghrun-fkrokuvqke" NodeId: 4 CreateTimeMs: 1771489611101 UpdateTimeMs: 1771489611105 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:51.108541Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. 
Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489947805811168:2532] 2026-02-19T08:26:51.108562Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489947805811170:2534], CA [4:7608489947805811171:2535], trace_id# 2026-02-19T08:26:51.108582Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:7608489947805811170:2534], CA [4:7608489947805811171:2535], trace_id# 2026-02-19T08:26:51.108647Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489947805811170:2534] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 659 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 389 FinishTimeMs: 1771489611105 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 377 HostName: "ghrun-fkrokuvqke" NodeId: 4 CreateTimeMs: 1771489611102 UpdateTimeMs: 1771489611105 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:51.108663Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489947805811170:2534] 2026-02-19T08:26:51.108680Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [4:7608489947805811171:2535], trace_id# 2026-02-19T08:26:51.108695Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:7608489947805811171:2535], trace_id# 2026-02-19T08:26:51.109031Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:7608489947805811171:2535] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1604 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1154 FinishTimeMs: 1771489611106 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 23 BuildCpuTimeUs: 1131 HostName: "ghrun-fkrokuvqke" NodeId: 4 CreateTimeMs: 1771489611103 UpdateTimeMs: 1771489611106 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:26:51.109054Z node 4 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [4:7608489947805811171:2535] 2026-02-19T08:26:51.109090Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [4:7608489947805811157:2401] trace_id# 2026-02-19T08:26:51.109118Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.010706s ReadRows: 0 ReadBytes: 0 RequestUnits# 7 ForceFlag# true trace_id# 2026-02-19T08:26:51.134149Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:51.134193Z node 4 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [4:7608489947805811158:2401] TxId: 281474976710672. Ctx: { TraceId: 01khtg4hse937yfwgr39y99afn, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OTBmM2VlNzMtMjkzMTUxMWEtZjdmZDdlMTUtNDVmYzRjYTY=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:26:51.353039Z node 4 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg4kezbwa3cdegqbaefs6h, Database: /Root, SessionId: ydb://session/3?node_id=4&id=YzlkYmUxYWMtYmI5NGE2Mi1mMjQyMjIzOS04MDBjZjU4Mw==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:51.353364Z node 4 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:239} ActorId: [4:7608489947805811384:2538] TxId: 281474976710673. Ctx: { TraceId: 01khtg4kezbwa3cdegqbaefs6h, Database: /Root, SessionId: ydb://session/3?node_id=4&id=YzlkYmUxYWMtYmI5NGE2Mi1mMjQyMjIzOS04MDBjZjU4Mw==, PoolId: default, IsStreamingQuery: 0}. Send Rollback to BufferActor buffer_actor_id# [4:7608489947805811383:2538] trace_id# 2026-02-19T08:26:51.353598Z node 4 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [4:7608489947805811384:2538] TxId: 281474976710673. 
Ctx: { TraceId: 01khtg4kezbwa3cdegqbaefs6h, Database: /Root, SessionId: ydb://session/3?node_id=4&id=YzlkYmUxYWMtYmI5NGE2Mi1mMjQyMjIzOS04MDBjZjU4Mw==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:26:51.353658Z node 4 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [4:7608489947805811384:2538] TxId: 281474976710673. Ctx: { TraceId: 01khtg4kezbwa3cdegqbaefs6h, Database: /Root, SessionId: ydb://session/3?node_id=4&id=YzlkYmUxYWMtYmI5NGE2Mi1mMjQyMjIzOS04MDBjZjU4Mw==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2026-02-19T08:26:50.658969Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489944232095604:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:50.659647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:50.851549Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:50.880379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:50.880507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:50.932112Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:50.934054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:50.934458Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489944232095565:2081] 1771489610655006 != 1771489610655009 2026-02-19T08:26:51.006946Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7dbc0ab11080] stream accepted Name# Session ok# true peer# ipv6:[::1]:48150 2026-02-19T08:26:51.008792Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7dbc0ab11080] facade attach Name# Session actor# [1:7608489948527063553:2398] peer# ipv6:[::1]:48150 2026-02-19T08:26:51.008832Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7dbc0ab11080] facade read Name# Session peer# ipv6:[::1]:48150 2026-02-19T08:26:51.008967Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7dbc0ab11080] facade finish Name# Session peer# ipv6:[::1]:48150 grpc status# (0) message# 2026-02-19T08:26:51.009265Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7dbc0ab11080] read finished Name# Session ok# false data# peer# ipv6:[::1]:48150 2026-02-19T08:26:51.009306Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7dbc0ab11080] stream done notification Name# Session ok# true peer# ipv6:[::1]:48150 2026-02-19T08:26:51.009342Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7dbc0ab11080] stream finished Name# Session ok# true peer# ipv6:[::1]:48150 grpc status# (0) message# 2026-02-19T08:26:51.009385Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: 
[0x7dbc0ab11080] deregistering request Name# Session peer# ipv6:[::1]:48150 (finish done) 2026-02-19T08:26:51.010778Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:265: Received TEvReadFinished, success = 0 2026-02-19T08:26:51.025024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2026-02-19T08:26:50.382775Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489941029594013:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:50.385932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:50.611403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:50.640557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:50.640682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:50.702512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:50.716177Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:50.727932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489941029593968:2081] 1771489610379778 != 1771489610379781 2026-02-19T08:26:50.779806Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d940fc80280] stream accepted Name# Session ok# true peer# ipv6:[::1]:60702 2026-02-19T08:26:50.780137Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d940fc80280] facade attach Name# Session actor# [1:7608489941029594619:2360] peer# ipv6:[::1]:60702 2026-02-19T08:26:50.780173Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d940fc80280] facade read Name# Session peer# ipv6:[::1]:60702 2026-02-19T08:26:50.780257Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d940fc80280] facade write Name# Session data# peer# ipv6:[::1]:60702 2026-02-19T08:26:50.780579Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d940fc80280] facade finish Name# Session peer# ipv6:[::1]:60702 grpc status# (0) message# 2026-02-19T08:26:50.780637Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d940fc80280] write finished Name# Session ok# true peer# ipv6:[::1]:60702 2026-02-19T08:26:50.780697Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:187: Received TEvWriteFinished, success = 1 2026-02-19T08:26:50.780974Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d940fc80280] stream done notification Name# Session ok# true peer# ipv6:[::1]:60702 2026-02-19T08:26:50.781037Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:194: Received TEvNotifiedWhenDone 2026-02-19T08:26:50.781043Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d940fc80280] stream finished Name# 
Session ok# true peer# ipv6:[::1]:60702 grpc status# (0) message# 2026-02-19T08:26:50.781065Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d940fc80280] read finished Name# Session ok# false data# peer# ipv6:[::1]:60702 2026-02-19T08:26:50.781102Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d940fc80280] deregistering request Name# Session peer# ipv6:[::1]:60702 (finish done) 2026-02-19T08:26:50.781109Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:181: Received TEvReadFinished, success = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2026-02-19T08:26:54.048803Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:178: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. >> TKesusTest::TestAcquireWaiterRelease [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpScan::ScanDuringSplit10 |94.0%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireWaiterRelease [GOOD] Test command err: 2026-02-19T08:26:29.315440Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:29.315576Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:29.338928Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:29.339049Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:29.353747Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:29.354345Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=9245887204508945777, session=0, seqNo=0) 2026-02-19T08:26:29.354542Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:29.377353Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=9245887204508945777, session=1) 2026-02-19T08:26:29.378198Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=111, session=1, semaphore="Lock1" count=1) 2026-02-19T08:26:29.378369Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:26:29.378490Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] 
Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:29.390657Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=111) 2026-02-19T08:26:29.391040Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2159], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:26:29.403158Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2159], cookie=222) 2026-02-19T08:26:29.403756Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2173], cookie=13938816743715371863, name="Lock1") 2026-02-19T08:26:29.403862Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2173], cookie=13938816743715371863) 2026-02-19T08:26:29.757431Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:29.757547Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:29.776434Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:29.776567Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:29.812434Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:29.813048Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=14042129051813063069, session=0, seqNo=0) 2026-02-19T08:26:29.813206Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:29.825465Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=14042129051813063069, session=1) 2026-02-19T08:26:29.825821Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=1302682429069715043, session=0, seqNo=0) 2026-02-19T08:26:29.825972Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:26:29.838265Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=1302682429069715043, session=2) 2026-02-19T08:26:29.838603Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:26:29.838753Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:26:29.838874Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:29.851227Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=111) 2026-02-19T08:26:29.851590Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2159], cookie=222, session=2, semaphore="Lock1" count=1) 
2026-02-19T08:26:29.863802Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2159], cookie=222) 2026-02-19T08:26:29.864398Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:151:2173], cookie=17734680305016990254, name="Lock1") 2026-02-19T08:26:29.864507Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:151:2173], cookie=17734680305016990254) 2026-02-19T08:26:29.864955Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:154:2176], cookie=6333735217362981529, name="Lock1") 2026-02-19T08:26:29.865029Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:154:2176], cookie=6333735217362981529) 2026-02-19T08:26:29.878160Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:29.878275Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:29.878721Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:29.879311Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:29.916791Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:29.916902Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:29.917370Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=13580812102885420651, session=1, seqNo=0) 2026-02-19T08:26:29.939893Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=13580812102885420651, session=1) 2026-02-19T08:26:29.940262Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=12142981472609230151, session=2, seqNo=0) 2026-02-19T08:26:29.954314Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=12142981472609230151, session=2) 2026-02-19T08:26:29.955050Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:205:2217], cookie=8216155022114669068, name="Lock1") 2026-02-19T08:26:29.955155Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:205:2217], cookie=8216155022114669068) 2026-02-19T08:26:29.955711Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:208:2220], cookie=18124780319227248001, name="Lock1") 2026-02-19T08:26:29.955795Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:208:2220], cookie=18124780319227248001) 2026-02-19T08:26:30.395611Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:30.407646Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:30.768936Z node 2 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:30.781060Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:31.132147Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:31.144350Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:31.508317Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:31.520348Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:31.871928Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:31.886631Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:32.240465Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:32.252522Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:32.598123Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:32.610348Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:32.977355Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:32.989415Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:33.341466Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:33.353887Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:33.738950Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:33.750768Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:34.111609Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:34.123853Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:34.484767Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:34.496567Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:34.857781Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:34.869694Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:35.242268Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:35.254403Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:35.667975Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:35.680114Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:36.041799Z node 2 :KESUS_TABLET DEBUG: tx_self_check. ... 
UG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:53.582952Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:53.610286Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:53.610833Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=17497721312081813581, session=0, seqNo=0) 2026-02-19T08:26:53.610980Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:53.625470Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=17497721312081813581, session=1) 2026-02-19T08:26:53.625854Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=18007082714746022397, session=0, seqNo=0) 2026-02-19T08:26:53.626004Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:26:53.638129Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=18007082714746022397, session=2) 2026-02-19T08:26:53.638392Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:137:2161], cookie=111, session=1, semaphore="Lock1" count=1) 2026-02-19T08:26:53.638518Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:26:53.638583Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:53.652064Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:137:2161], cookie=111) 2026-02-19T08:26:53.652413Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:137:2161], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:26:53.652729Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:137:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2026-02-19T08:26:53.652812Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2026-02-19T08:26:53.665384Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:137:2161], cookie=222) 2026-02-19T08:26:53.665461Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:137:2161], cookie=333) 2026-02-19T08:26:53.666016Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:153:2175], cookie=7165992086377678033, name="Lock1") 2026-02-19T08:26:53.666116Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:153:2175], cookie=7165992086377678033) 2026-02-19T08:26:54.062987Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:54.063116Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 
2026-02-19T08:26:54.084226Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:54.084450Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:54.109242Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:54.109743Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=2714167645285538310, session=0, seqNo=0) 2026-02-19T08:26:54.109895Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:54.122007Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=2714167645285538310, session=1) 2026-02-19T08:26:54.122282Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=14793840823678888676, session=0, seqNo=0) 2026-02-19T08:26:54.122390Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:26:54.136009Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=14793840823678888676, session=2) 2026-02-19T08:26:54.136749Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-02-19T08:26:54.136914Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:26:54.137000Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:54.150330Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-02-19T08:26:54.150651Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-02-19T08:26:54.150894Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2026-02-19T08:26:54.150948Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-02-19T08:26:54.163887Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-02-19T08:26:54.163989Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=333) 2026-02-19T08:26:54.164566Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:156:2178], cookie=1835106659608879312, name="Lock1") 2026-02-19T08:26:54.164676Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:156:2178], cookie=1835106659608879312) 2026-02-19T08:26:54.165132Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:159:2181], cookie=16819810355461554677, 
name="Lock1") 2026-02-19T08:26:54.165208Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:159:2181], cookie=16819810355461554677) 2026-02-19T08:26:54.180181Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:54.180316Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:54.180832Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:54.181473Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:54.218609Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:54.218789Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:54.219292Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:198:2211], cookie=14530492728689591836, name="Lock1") 2026-02-19T08:26:54.219388Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:198:2211], cookie=14530492728689591836) 2026-02-19T08:26:54.219955Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:206:2218], cookie=5107868073338259358, name="Lock1") 2026-02-19T08:26:54.220036Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:206:2218], cookie=5107868073338259358) 2026-02-19T08:26:54.710889Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:54.710979Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:54.723624Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:54.724172Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:54.759763Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:54.760362Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=9153784443138504423, session=0, seqNo=0) 2026-02-19T08:26:54.760523Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:54.778570Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=9153784443138504423, session=1) 2026-02-19T08:26:54.778922Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=2395721979087412839, session=0, seqNo=0) 2026-02-19T08:26:54.779076Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-02-19T08:26:54.792858Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=2395721979087412839, session=2) 2026-02-19T08:26:54.793235Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 
2026-02-19T08:26:54.793401Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-02-19T08:26:54.793514Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-02-19T08:26:54.810658Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2026-02-19T08:26:54.811076Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-02-19T08:26:54.811447Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=333, name="Lock1") 2026-02-19T08:26:54.811537Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-02-19T08:26:54.824284Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2026-02-19T08:26:54.824381Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=333) |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpComputeSchedulerService::FeatureFlagOnStart+Enabled [GOOD] >> KqpComputeSchedulerService::FeatureFlagOnStart-Enabled >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2026-02-19T08:26:52.604302Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489950472129214:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:52.604935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:52.830294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:52.830402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:52.863221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:52.878985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:52.914453Z node 1 
:IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:52.917911Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489950472129169:2081] 1771489612600143 != 1771489612600146 2026-02-19T08:26:52.991795Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d0bc1e11080] stream accepted Name# Session ok# true peer# ipv6:[::1]:36246 2026-02-19T08:26:52.994989Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d0bc1e11080] facade attach Name# Session actor# [1:7608489950472129845:2386] peer# ipv6:[::1]:36246 2026-02-19T08:26:52.995023Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d0bc1e11080] facade read Name# Session peer# ipv6:[::1]:36246 2026-02-19T08:26:52.995259Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d0bc1e11080] read finished Name# Session ok# true data# peer# ipv6:[::1]:36246 2026-02-19T08:26:52.996632Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:142: Received TEvReadFinished, success = 1 2026-02-19T08:26:52.996688Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d0bc1e11080] facade write Name# Session data# peer# ipv6:[::1]:36246 2026-02-19T08:26:52.997006Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d0bc1e11080] facade finish Name# Session peer# ipv6:[::1]:36246 grpc status# (0) message# 2026-02-19T08:26:52.997026Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d0bc1e11080] write finished Name# Session ok# true peer# ipv6:[::1]:36246 2026-02-19T08:26:52.997344Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d0bc1e11080] stream done notification Name# Session ok# true peer# ipv6:[::1]:36246 2026-02-19T08:26:52.997393Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d0bc1e11080] stream finished Name# Session ok# true peer# ipv6:[::1]:36246 grpc status# (0) message# 2026-02-19T08:26:52.997510Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d0bc1e11080] deregistering request Name# Session peer# ipv6:[::1]:36246 (finish done) |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionStealing |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [GOOD] >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TKesusTest::TestSessionStealingSameKey |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpComputeSchedulerService::FeatureFlagOnStart-Enabled [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2026-02-19T08:26:34.523190Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:34.523323Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:34.540659Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:34.540793Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:34.555586Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:34.556042Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2159], cookie=15409139428640363527, session=0, seqNo=0) 2026-02-19T08:26:34.556199Z node 1 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:34.578502Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2159], cookie=15409139428640363527, session=1) 2026-02-19T08:26:34.579279Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:135:2159], cookie=4107615925019637188 2026-02-19T08:26:34.579738Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:148:2170], cookie=2993642407244926763) 2026-02-19T08:26:34.579818Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:148:2170], cookie=2993642407244926763) 2026-02-19T08:26:35.006421Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:35.018985Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:35.380445Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:35.392428Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:35.738181Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:35.750525Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:36.102409Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:36.114337Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:36.486105Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:36.499427Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:36.850518Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:36.862797Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:37.204175Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:37.216429Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:37.567631Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:37.579873Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:37.931040Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:37.943149Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:38.337729Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:38.349646Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:38.710306Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:38.722797Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:39.084656Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:39.096680Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:39.458660Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:39.477088Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:39.839087Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:39.851467Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:40.254969Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:40.267112Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:40.627822Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:40.640528Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:41.001529Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:41.014066Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:41.370561Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:41.382921Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:41.745332Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:41.757567Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:42.143447Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:42.155532Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:42.532299Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:42.544745Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:42.912385Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:42.924845Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:43.288997Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:43.301294Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:43.666450Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:43.685231Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:44.047013Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:44.059358Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:44.428927Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:44.441120Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2026-02-19T08:26:44.814025Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:44.830827Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:45.197892Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:45.211918Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:45.589681Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:45.602667Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:45.995449Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:46.010566Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:46.391773Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:46.404046Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:46.766015Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:46.778404Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:47.139328Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:47.154366Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:47.522140Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:47.534392Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:47.920728Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:47.932880Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:48.300327Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:48.313183Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:48.682653Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:48.696667Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:49.077601Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:49.090009Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:49.460215Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:49.472476Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:49.859829Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:49.872634Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:50.238117Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:50.254135Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:50.613093Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:50.635003Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:50.991473Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:51.003595Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:51.374005Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:51.386293Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:51.784132Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:51.805600Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:52.181790Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:52.194403Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:52.562435Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:52.575474Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:52.945883Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:52.958604Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:53.322180Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:53.334882Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:53.730319Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:53.742596Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:54.096882Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:54.109319Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:54.480655Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:54.494495Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:54.866047Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:54.882406Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:55.250029Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:55.262362Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:55.638231Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:55.650615Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:56.012247Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2026-02-19T08:26:56.024154Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:56.377912Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:56.391160Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:56.776191Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:56.790680Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:57.164310Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:57.182900Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:57.596998Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-02-19T08:26:57.597110Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-02-19T08:26:57.611963Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-02-19T08:26:57.623163Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:539:2486], cookie=1071843722604542439) 2026-02-19T08:26:57.623283Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:539:2486], cookie=1071843722604542439) 2026-02-19T08:26:58.154348Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:58.154456Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:58.171113Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:58.171235Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:58.207707Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:58.208682Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2159], cookie=12345, session=0, seqNo=0) 2026-02-19T08:26:58.208848Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:58.221610Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2159], cookie=12345, session=1) 2026-02-19T08:26:58.222411Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:142:2164], cookie=23456, session=1, seqNo=0) 2026-02-19T08:26:58.235236Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:142:2164], cookie=23456, session=1) 2026-02-19T08:26:58.610211Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:58.610324Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:58.628540Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:58.628664Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:58.653619Z 
node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:58.654526Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=12345, session=0, seqNo=0) 2026-02-19T08:26:58.654694Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:58.666966Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=12345, session=1) 2026-02-19T08:26:58.667699Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:144:2166], cookie=23456, session=1, seqNo=0) 2026-02-19T08:26:58.680124Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:144:2166], cookie=23456, session=1) 2026-02-19T08:26:59.056384Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:59.056464Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:59.070294Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:59.070461Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:59.096139Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:59.097077Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=12345, session=0, seqNo=0) 2026-02-19T08:26:59.097241Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:59.109452Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=12345, session=1) 2026-02-19T08:26:59.110281Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:144:2166], cookie=23456, session=1, seqNo=0) 2026-02-19T08:26:59.123093Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:144:2166], cookie=23456, session=1) 2026-02-19T08:26:59.522890Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:59.523002Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:59.542925Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:59.543508Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:59.578552Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:59.579389Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=12345, session=0, seqNo=0) 2026-02-19T08:26:59.579536Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:59.591615Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=12345, session=1) 2026-02-19T08:26:59.592386Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute 
(sender=[5:144:2166], cookie=23456, session=1, seqNo=0) 2026-02-19T08:26:59.604534Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:144:2166], cookie=23456, session=1) |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query [GOOD] Test command err: 2026-02-19T08:22:28.499970Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488817576309884:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:28.500052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:28.531641Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:22:28.699991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:28.729593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:28.729746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:28.758659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:28.764770Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:28.766244Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488817576309854:2081] 1771489348498685 != 1771489348498688 2026-02-19T08:22:28.853391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f43/r3tmp/yandexRICODl.tmp 2026-02-19T08:22:28.853424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f43/r3tmp/yandexRICODl.tmp 2026-02-19T08:22:28.853637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f43/r3tmp/yandexRICODl.tmp 2026-02-19T08:22:28.853765Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:28.880986Z INFO: TTestServer started on Port 15237 GrpcPort 23505 2026-02-19T08:22:28.934414Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:23505 2026-02-19T08:22:29.038619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:29.096294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:29.505592Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:30.918365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488826166245494:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:30.918381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488826166245482:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:30.918543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:30.918962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488826166245500:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:30.919156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:30.921809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:30.930234Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488826166245498:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:22:31.084174Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488826166245564:2639] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:31.104305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:31.131231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:31.193843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:31.211468Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488830461212868:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:31.212472Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NTUxNDY4NDAtNDcxMjk5Zi1kNzIyNzZiNy03MzBkYWI0NQ==, ActorId: [1:7608488826166245465:2333], ActorState: ExecuteState, LegacyTraceId: 01khtfwn844sa8qrhvz28mrdep, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:31.214668Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608488830461213151:2816] 2026-02-19T08:22:33.500250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488817576309884:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:33.500309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-02-19T08:22:37.246527Z :SimpleBlockingKeyedWriteSession_NoSeqNo INFO: TTopicSdkTestSetup started 2026-02-19T08:22:37.258133Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:22:37.286512Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:37.286514Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037893] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:37.288439Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:37.288841Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:22:37.288982Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-02-19T08:22:37.289007Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:22:37.289032Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72075186224037892] doesn't have tx writes info 2026-02-19T08:22:37.289104Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037893] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:22:37.289180Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72075186224037893] no config, start with empty partitions and default config 2026-02-19T08:22:37.289198Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:22:37.289209Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72075186224037893] doesn't have tx writes info 2026-02-19T08:22:37.289258Z node 1 :PERSQUEUE DEBUG: ... onsumer session test-consumer_17_1_14777600682474578782_v1 is DEAD 2026-02-19T08:26:56.585925Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|6a825e62-c8ea5892-7b6fd82f-896b072c_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:26:56.586271Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|6a825e62-c8ea5892-7b6fd82f-896b072c_0] PartitionId [0] Generation [1] Write session: destroy 2026-02-19T08:26:56.586236Z node 17 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [17:7608489932158632134:2902] disconnected. 2026-02-19T08:26:56.586288Z node 17 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [17:7608489932158632134:2902] disconnected; active server actors: 1 2026-02-19T08:26:56.586319Z node 17 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [17:7608489932158632134:2902] client test-consumer disconnected session test-consumer_17_1_14777600682474578782_v1 2026-02-19T08:26:56.586435Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037894] Destroy direct read session test-consumer_17_1_14777600682474578782_v1 2026-02-19T08:26:56.586485Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [17:7608489932158632137:2905] destroyed 2026-02-19T08:26:56.586538Z node 17 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_17_1_14777600682474578782_v1 2026-02-19T08:26:56.586954Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2026-02-19T08:26:56.587009Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0] PartitionId [0] Generation [1] Write session will now close 2026-02-19T08:26:56.587061Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0] PartitionId [0] Generation [1] Write session: aborting 2026-02-19T08:26:56.587262Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:26:56.587314Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0] PartitionId [0] Generation [1] Write session: destroy 2026-02-19T08:26:56.594061Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|6a825e62-c8ea5892-7b6fd82f-896b072c_0 grpc read done: success: 0 data: 2026-02-19T08:26:56.594115Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|6a825e62-c8ea5892-7b6fd82f-896b072c_0 grpc read failed 2026-02-19T08:26:56.594159Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message_group_id|6a825e62-c8ea5892-7b6fd82f-896b072c_0 grpc closed 2026-02-19T08:26:56.594187Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|6a825e62-c8ea5892-7b6fd82f-896b072c_0 is DEAD 2026-02-19T08:26:56.595669Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:56.595734Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:56.595950Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037896] server disconnected, pipe [17:7608489932158632042:2896] destroyed 2026-02-19T08:26:56.595991Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037896] server disconnected, pipe [17:7608489932158632045:2896] destroyed 2026-02-19T08:26:56.596032Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:26:56.596073Z node 17 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:56.596104Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.596133Z node 17 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:56.596168Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.596202Z node 17 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:26:56.600579Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0 grpc read done: success: 0 data: 2026-02-19T08:26:56.600633Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0 grpc read failed 2026-02-19T08:26:56.600707Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 5 sessionId: test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0 2026-02-19T08:26:56.600739Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|312f0158-981bf9fd-4e259218-765e4ab2_0 is DEAD 2026-02-19T08:26:56.601241Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:56.601308Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:26:56.601702Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [17:7608489932158631999:2888] destroyed 2026-02-19T08:26:56.601759Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [17:7608489932158632002:2888] destroyed 2026-02-19T08:26:56.601800Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:26:56.601850Z node 17 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:56.601882Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.601911Z node 17 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:56.601948Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.601975Z node 17 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:56.612964Z node 17 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:56.613019Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.613043Z node 17 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:56.613071Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.613094Z node 17 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:56.613153Z node 17 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:56.613165Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.613176Z node 17 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:56.613188Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.613198Z node 17 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:56.613422Z node 17 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:56.613445Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.613466Z node 17 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:56.613480Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.613491Z node 17 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:26:56.713282Z node 17 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:56.713347Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.713376Z node 17 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-02-19T08:26:56.713412Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.713440Z node 17 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:26:56.713504Z node 17 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:56.713518Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.713528Z node 17 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:56.713542Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.713555Z node 17 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:26:56.713767Z node 17 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:26:56.713781Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.713793Z node 17 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:26:56.713809Z node 17 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:26:56.713821Z node 17 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] Test command err: 2026-02-19T08:26:42.855428Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489907419348134:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:42.855805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:43.084989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:43.085080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:43.108914Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:43.122359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:43.166937Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:43.168088Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489907419348094:2081] 1771489602853648 != 1771489602853651 2026-02-19T08:26:43.224062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:43.224096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:43.224107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:43.224196Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:43.355926Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:43.383659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:43.863062Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:45.533645Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-02-19T08:26:45.533702Z node 1 :KQP_COMPUTE_SCHEDULER INFO: kqp_compute_scheduler_service.cpp:47: Enabled on start 2026-02-19T08:26:45.537451Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:304} SessionId: ydb://session/3?node_id=1&id=Y2U4YzljZDItNjljZDdiNTEtMzQ3N2Y1MmUtMjNhMGRiYmU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# Y2U4YzljZDItNjljZDdiNTEtMzQ3N2Y1MmUtMjNhMGRiYmU= temp_dir_name# 27ecff08-4aa0-e29e-770a-1a8e32644d10 trace_id# 2026-02-19T08:26:45.541195Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7608489920304250858:2326], Start check tables existence, number paths: 2 2026-02-19T08:26:45.542061Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-02-19T08:26:45.542104Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-02-19T08:26:45.542131Z node 1 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:79: Subscribed to config changes 2026-02-19T08:26:45.542141Z node 1 :KQP_COMPUTE_SCHEDULER INFO: kqp_compute_scheduler_service.cpp:86: Become enabled 2026-02-19T08:26:45.542204Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:309} SessionId: 
ydb://session/3?node_id=1&id=Y2U4YzljZDItNjljZDdiNTEtMzQ3N2Y1MmUtMjNhMGRiYmU=, ActorId: [1:7608489920304250865:2333], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-02-19T08:26:45.545078Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7608489920304250858:2326], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-02-19T08:26:45.545134Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7608489920304250858:2326], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-02-19T08:26:45.545166Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7608489920304250858:2326], Successfully finished 2026-02-19T08:26:45.545367Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-02-19T08:26:45.545537Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-02-19T08:26:45.548558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-02-19T08:26:45.563496Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304250937:2533], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-02-19T08:26:45.566505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:45.568598Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304250937:2533], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710659 2026-02-19T08:26:45.570996Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304250937:2533], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-02-19T08:26:45.589003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304250937:2533], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:26:45.684714Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304250937:2533], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-02-19T08:26:45.717009Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608489920304251091:2660] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:45.717088Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304250937:2533], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-02-19T08:26:45.720638Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304251124:2692], DatabaseId: Root, PoolId: zero_pool, Start pool creating 2026-02-19T08:26:45.722341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:45.723076Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304251124:2692], DatabaseId: Root, PoolId: zero_pool, Subscribe on create pool tx: 281474976710661 2026-02-19T08:26:45.723256Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304251124:2692], DatabaseId: Root, PoolId: zero_pool, Tablet to pipe successfully connected 2026-02-19T08:26:45.727210Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304251124:2692], DatabaseId: Root, PoolId: zero_pool, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-02-19T08:26:45.795433Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304251124:2692], DatabaseId: Root, PoolId: zero_pool, Start pool creating 2026-02-19T08:26:45.797619Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608489920304251201:2768] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/zero_pool\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:45.797703Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608489920304251124:2692], DatabaseId: Root, PoolId: zero_pool, Pool successfully created 2026-02-19T08:26:45.798116Z node 1 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:101: Add database: /Root 2026-02-19T08:26:45.800091Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:304} SessionId: ydb://session/3?node_id=1&id=ZThiOGZkY2QtZjBlYTdhMzEtMzM5NTRlYy1hMTdmYjc3Nw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZThiOGZkY2QtZjBlYTdhMzEtMzM5NTRlYy1hMTdmYjc3Nw== temp_dir_name# 72d89f8b-4f87-8e0e-9253-9ca6f0223f42 trace_id# 2026-02-19T08:26:45.800185Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:309} SessionId: ydb://session/3?node_id=1&id=ZThiOGZkY2QtZjBlYTdhMzEtMzM5NTRlYy1hMTdmYjc3Nw==, ActorId: [1:7608489920304251211:2341], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-02-19T08:26:45.800352Z node 1 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:122: Add pool: /Root/zero_pool 2026-02-19T08:26:45.800380Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: zero_pool 2026-02-19T08:26:45.800393Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-02-19T08:26:45.800558Z node 1 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:122: Add pool: /Root/zero_pool 2026-02-19T08:26:45.800601Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:542} SessionId: ydb://session/3?node_id=1&id=ZThiOGZkY2QtZjBlYTdhMzEtMzM5NTRlYy1hMTdmYjc3Nw==, ActorId: [1:7608489920304251211:2341], ActorState: ReadyState, Legacy ... 
: {KQPSA@kqp_session_actor.cpp:542} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ReadyState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [2:7608489981709847473:2761] database# Root database_id# /Root pool_id# zero_pool trace_id# 2026-02-19T08:26:59.721484Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608489981709847476:2343], DatabaseId: /Root, PoolId: zero_pool, Start pool fetching 2026-02-19T08:26:59.721515Z node 2 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:122: Add pool: /Root/zero_pool 2026-02-19T08:26:59.721528Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [2:7608489981709847474:2342], DatabaseId: /Root, PoolId: zero_pool, SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM= 2026-02-19T08:26:59.721553Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: zero_pool 2026-02-19T08:26:59.721572Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7608489981709847477:2344], Database: /Root, Start database fetching 2026-02-19T08:26:59.721802Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7608489981709847477:2344], Database: /Root, Database info successfully fetched, serverless: 0 2026-02-19T08:26:59.721863Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-02-19T08:26:59.721936Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [2:7608489981709847484:2345], DatabaseId: /Root, PoolId: zero_pool, SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, Start pool fetching 2026-02-19T08:26:59.721975Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608489981709847486:2346], DatabaseId: /Root, PoolId: zero_pool, Start pool fetching 2026-02-19T08:26:59.722776Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608489981709847486:2346], DatabaseId: /Root, PoolId: zero_pool, Pool info successfully fetched 2026-02-19T08:26:59.722793Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608489981709847476:2343], DatabaseId: /Root, PoolId: zero_pool, Pool info successfully fetched 2026-02-19T08:26:59.722834Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:107: [WorkloadService] [TPoolResolverActor] ActorId: [2:7608489981709847484:2345], DatabaseId: /Root, PoolId: zero_pool, SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, Pool info successfully resolved 2026-02-19T08:26:59.722870Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool zero_pool, DatabaseId: /Root 2026-02-19T08:26:59.722884Z node 2 :KQP_WORKLOAD_SERVICE INFO: 
kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/zero_pool 2026-02-19T08:26:59.723083Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:286: [WorkloadService] [Service] Successfully fetched pool zero_pool, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM= 2026-02-19T08:26:59.723107Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [2:7608489981709847491:2347], DatabaseId: /Root, PoolId: zero_pool, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 44] 2026-02-19T08:26:59.723181Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:203: [WorkloadService] [TPoolHandlerActorBase] ActorId: [2:7608489981709847491:2347], DatabaseId: /Root, PoolId: zero_pool, Received new request, worker id: [2:7608489981709847474:2342], session id: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM= 2026-02-19T08:26:59.723226Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:313: [WorkloadService] [TPoolHandlerActorBase] ActorId: [2:7608489981709847491:2347], DatabaseId: /Root, PoolId: zero_pool, Reply continue success to [2:7608489981709847474:2342], session id: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, local in flight: 1 2026-02-19T08:26:59.723251Z node 2 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:297: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: zero_pool, SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM= 2026-02-19T08:26:59.723312Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:636} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Continue request pool_id# zero_pool trace_id# 2026-02-19T08:26:59.723413Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:750} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Sending CompileQuery request trace_id# 2026-02-19T08:26:59.724047Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [2:7608489981709847491:2347], DatabaseId: /Root, PoolId: zero_pool, Got watch notification 2026-02-19T08:26:59.783011Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1820} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136875516311256 trace_id# 2026-02-19T08:26:59.783068Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1991} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Sending to Executer span_id_size# 8 trace_id# 2026-02-19T08:26:59.783209Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2062} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: 
ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Created new KQP executer executer_id# [2:7608489981709847545:2342] is_rollback# false trace_id# 2026-02-19T08:26:59.783336Z node 2 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:101: Add database: /Root 2026-02-19T08:26:59.783352Z node 2 :KQP_COMPUTE_SCHEDULER DEBUG: kqp_compute_scheduler_service.cpp:122: Add pool: /Root/zero_pool 2026-02-19T08:26:59.785077Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2587} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Forwarded TEvStreamData to [2:7608489981709847473:2761] trace_id# 2026-02-19T08:26:59.785816Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2467} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-02-19T08:26:59.785926Z node 2 :KQP_COMPUTE_SCHEDULER ERROR: kqp_compute_scheduler_service.cpp:196: Trying to remove unknown query: 281474976710663 2026-02-19T08:26:59.785967Z node 2 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2795} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, TxInfo status# Committed kind# Pure total_duration# 3.284 server_duration# 2.988 queries_count# 2 trace_id# 2026-02-19T08:26:59.786035Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2969} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-02-19T08:26:59.786249Z node 2 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3378} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: ExecuteState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-02-19T08:26:59.786296Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:233: [WorkloadService] [TPoolHandlerActorBase] ActorId: [2:7608489981709847491:2347], DatabaseId: /Root, PoolId: zero_pool, Received cleanup request, worker id: [2:7608489981709847474:2342], session id: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, duration: 0.064488s, cpu consumed: 0.000843s 2026-02-19T08:26:59.786338Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:437: [WorkloadService] [TPoolHandlerActorBase] ActorId: [2:7608489981709847491:2347], DatabaseId: /Root, PoolId: zero_pool, Reply cleanup success to [2:7608489981709847474:2342], session id: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, local in flight: 0 2026-02-19T08:26:59.786389Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3455} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: CleanupState, LegacyTraceId: 
01khtg4vr99j9y4fch0cxnnp79, EndCleanup is_final# true trace_id# 2026-02-19T08:26:59.786393Z node 2 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:342: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: zero_pool, Duration: 0.064488s, CpuConsumed: 0.000843s, AdjustCpuQuota: 0 2026-02-19T08:26:59.786435Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3170} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: CleanupState, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Sent query response back to proxy proxy_request_id# 3 proxy_id# [2:7608489968824944638:2269] trace_id# 2026-02-19T08:26:59.786469Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3469} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: unknown state, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-02-19T08:26:59.786532Z node 2 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3564} SessionId: ydb://session/3?node_id=2&id=Zjc0ZmU5ZWItY2Y4OGE5NTgtMjE3ZWM0OTAtOWI3ZWFmNmM=, ActorId: [2:7608489981709847474:2342], ActorState: unknown state, LegacyTraceId: 01khtg4vr99j9y4fch0cxnnp79, Session actor destroyed trace_id# |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpBatchDelete::MultiStatement [GOOD] >> THealthCheckTest::SpecificServerless >> THealthCheckTest::TestNoSchemeShardResponse >> THealthCheckTest::Issues100GroupsListing >> THealthCheckTest::Basic >> THealthCheckTest::OneIssueListing >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> THealthCheckTest::Issues100Groups100VCardListing >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> TOlap::CreateStoreWithDirs >> TOlapNaming::AlterColumnTableFailed >> THealthCheckTest::StaticGroupIssue >> TOlapNaming::CreateColumnTableExtraSymbolsOk >> TOlap::StoreStatsQuota >> TOlap::StoreStats >> TOlap::CreateTableWithNullableKeysNotAllowed >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] >> TOlap::CreateDropStandaloneTable >> TOlapNaming::CreateColumnStoreFailed >> TOlapNaming::CreateColumnTableOk >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> TOlapNaming::CreateColumnStoreFailed [GOOD] >> TOlapNaming::AlterColumnTableOk >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] Test command err: 2026-02-19T08:24:21.998717Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489301672929253:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:22.000292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:22.294695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:22.294767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:22.339126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:22.365861Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489301672929198:2081] 1771489461981569 != 1771489461981572 2026-02-19T08:24:22.367446Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:22.383005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:22.503563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:22.503603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:22.503613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:22.503694Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:22.540070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:25.389126Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006ce/r3tmp/spilling-tmp-runner/node_1_af9116cd-647ce6d2-a9dd6190-4159cd7f, actor: [1:7608489318852800214:2385] 2026-02-19T08:24:25.389321Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006ce/r3tmp/spilling-tmp-runner 2026-02-19T08:24:27.516919Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.516974Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517004Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517030Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517054Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 
2026-02-19T08:24:27.517087Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517125Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517163Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517203Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517243Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517283Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:27.517337Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg05pa1ymmqypjwenn59ac, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4YzdjOGQtZGIwMzZkMzktNTAwNmNlNTItYzhiNTU4MTQ=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:27.517655Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489327442735886:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg05pa1ymmqypjwenn59ac, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4YzdjOGQtZGIwMzZkMzktNTAwNmNlNTItYzhiNTU4MTQ=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:27.517689Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489327442735886:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg05pa1ymmqypjwenn59ac, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4YzdjOGQtZGIwMzZkMzktNTAwNmNlNTItYzhiNTU4MTQ=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:27.518739Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:27.518896Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.518943Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.518983Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.519032Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.519085Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.519126Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.519181Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.519230Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.519294Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:27.519347Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... 89991188550812:2542], trace_id# 2026-02-19T08:27:01.530554Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [13:7608489991188550808:2538], CA [13:7608489991188550809:2539], CA [13:7608489991188550810:2540], CA [13:7608489991188550811:2541], CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.530672Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [13:7608489991188550808:2538] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1257 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 792 FinishTimeMs: 1771489621526 EgressBytes: 495 EgressRows: 9 ComputeCpuTimeUs: 20 BuildCpuTimeUs: 772 HostName: "ghrun-fkrokuvqke" NodeId: 13 CreateTimeMs: 1771489621520 UpdateTimeMs: 1771489621526 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:01.530697Z node 13 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. 
Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [13:7608489991188550808:2538] 2026-02-19T08:27:01.530742Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [13:7608489991188550809:2539], CA [13:7608489991188550810:2540], CA [13:7608489991188550811:2541], CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.530786Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [13:7608489991188550809:2539], CA [13:7608489991188550810:2540], CA [13:7608489991188550811:2541], CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.530901Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [13:7608489991188550809:2539] TaskId# 8 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 955 Tasks { TaskId: 8 StageId: 7 CpuTimeUs: 576 FinishTimeMs: 1771489621526 EgressBytes: 207 EgressRows: 9 ComputeCpuTimeUs: 14 BuildCpuTimeUs: 562 HostName: "ghrun-fkrokuvqke" NodeId: 13 CreateTimeMs: 1771489621521 UpdateTimeMs: 1771489621526 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:01.530928Z node 13 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [13:7608489991188550809:2539] 2026-02-19T08:27:01.530970Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [13:7608489991188550810:2540], CA [13:7608489991188550811:2541], CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.531007Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [13:7608489991188550810:2540], CA [13:7608489991188550811:2541], CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.531114Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [13:7608489991188550810:2540] TaskId# 9 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1150 Tasks { TaskId: 9 StageId: 8 CpuTimeUs: 666 FinishTimeMs: 1771489621526 EgressBytes: 270 EgressRows: 10 ComputeCpuTimeUs: 20 BuildCpuTimeUs: 646 HostName: "ghrun-fkrokuvqke" NodeId: 13 CreateTimeMs: 1771489621521 UpdateTimeMs: 1771489621526 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:01.531143Z node 13 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [13:7608489991188550810:2540] 2026-02-19T08:27:01.531175Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [13:7608489991188550811:2541], CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.531204Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [13:7608489991188550811:2541], CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.531313Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [13:7608489991188550811:2541] TaskId# 10 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1047 Tasks { TaskId: 10 StageId: 9 CpuTimeUs: 595 FinishTimeMs: 1771489621526 EgressBytes: 480 EgressRows: 16 ComputeCpuTimeUs: 24 BuildCpuTimeUs: 571 HostName: "ghrun-fkrokuvqke" NodeId: 13 CreateTimeMs: 1771489621522 UpdateTimeMs: 1771489621526 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:01.531338Z node 13 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [13:7608489991188550811:2541] 2026-02-19T08:27:01.531366Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.531393Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [13:7608489991188550812:2542], trace_id# 2026-02-19T08:27:01.531894Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [13:7608489991188550812:2542] TaskId# 11 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2230 Tasks { TaskId: 11 StageId: 10 CpuTimeUs: 1564 FinishTimeMs: 1771489621528 EgressBytes: 1848 EgressRows: 56 ComputeCpuTimeUs: 42 BuildCpuTimeUs: 1522 HostName: "ghrun-fkrokuvqke" NodeId: 13 CreateTimeMs: 1771489621523 UpdateTimeMs: 1771489621528 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:01.531933Z node 13 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [13:7608489991188550812:2542] 2026-02-19T08:27:01.531991Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [13:7608489991188550798:2404] trace_id# 2026-02-19T08:27:01.532053Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.015233s ReadRows: 0 ReadBytes: 0 RequestUnits# 10 ForceFlag# true trace_id# 2026-02-19T08:27:01.563083Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-02-19T08:27:01.563259Z node 13 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [13:7608489991188550799:2404] TxId: 281474976710672. Ctx: { TraceId: 01khtg4tk0378gfntscaydt6dv, Database: /Root, SessionId: ydb://session/3?node_id=13&id=MmY4OWFmZTYtZWE5MjUwZS03OTZiOWExZi04MTBhZGQ2NQ==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:23:51.393097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:23:51.512853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:23:51.534142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:23:51.534232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:23:51.534794Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:23:51.834068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:23:51.834236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:23:51.921518Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489428152670 != 1771489428152674 2026-02-19T08:23:51.932439Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:51.980600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:23:52.087701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:23:52.407657Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:837:2731], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:52.407742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:52.407811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:23:52.407900Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [1:835:2729], Recipient [1:404:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-02-19T08:23:52.407932Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:23:52.532708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-02-19T08:23:52.532945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.533157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:23:52.533218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:23:52.533434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:23:52.533522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:52.533622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:52.534250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:23:52.534397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:23:52.534443Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:52.534469Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:52.534623Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.534666Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.534730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.534786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:23:52.534828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:23:52.534863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:23:52.534959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:52.535343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:52.535372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:52.535468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.535502Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.535554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.535586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:23:52.535640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:23:52.535694Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects 
ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:52.535963Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:52.535995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-02-19T08:23:52.536099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:404:2403], Recipient [1:404:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.536135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:23:52.536183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.536223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:23:52.536262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:23:52.536298Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-02-19T08:23:52.536340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:23:52.539370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:23:52.539838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:23:52.539885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:23:52.540044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:23:52.540304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269091328, Sender [1:400:2399], Recipient [1:404:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-02-19T08:23:52.540683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:843:2737], Recipient [1:404:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:52.540740Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:23:52.540778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:23:52.540911Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124996, Sender [1:835:2729], Recipient [1:404:2403]: NKikimrScheme.TEvNotifyTxCompletion 
TxId: 1 2026-02-19T08:23:52.540944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-02-19T08:23:52.541006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight ... 366: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.609879Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.609915Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.620446Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:405:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:27:03.620539Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:27:03.620576Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2026-02-19T08:27:03.620661Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 2 2026-02-19T08:27:03.620701Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-02-19T08:27:03.620823Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 60 shard idx 72057594046644480:11 data size 271 row count 13 2026-02-19T08:27:03.620896Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037898 maps to shardIdx: 72057594046644480:11 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 60], pathId map=SequenceTable, is column=0, is olap=0, RowCount 13, DataSize 271, with borrowed parts 2026-02-19T08:27:03.620933Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037898, followerId 0 2026-02-19T08:27:03.621037Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037898: SplitByLoadNotEnabledForTable 2026-02-19T08:27:03.621094Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 68 shard idx 72057594046644480:17 data size 368 row count 16 2026-02-19T08:27:03.621138Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037906 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 68], pathId map=SequenceTable, is column=0, is olap=0, RowCount 16, DataSize 368, with borrowed parts 2026-02-19T08:27:03.621167Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037906, followerId 0 2026-02-19T08:27:03.621224Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037906: SplitByLoadNotEnabledForTable 2026-02-19T08:27:03.621292Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 
2026-02-19T08:27:03.631946Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:405:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:27:03.632029Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:27:03.632065Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-02-19T08:27:03.642575Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.642657Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.642699Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.642740Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.642778Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037901][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.663662Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.663725Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.663761Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.663791Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.663817Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.674336Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.674420Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.674453Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.674493Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.674527Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.684992Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.685065Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.685093Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.685122Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.685149Z 
node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037901][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.706002Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.706083Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.706119Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.706160Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.706196Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.716711Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.716785Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.716821Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.716860Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.716894Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.727521Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.727617Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.727657Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.727706Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.727754Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037901][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.748664Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.748728Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.748754Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.748784Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.748811Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.759207Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.759286Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-02-19T08:27:03.759322Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.759356Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.759385Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][0][StateIdle] Try persist 2026-02-19T08:27:03.769856Z node 11 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:03.769929Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.769964Z node 11 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:03.769999Z node 11 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:03.770033Z node 11 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037901][Partition][0][StateIdle] Try persist 2026-02-19T08:27:04.129276Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037907, clientId# [11:3247:4464], serverId# [11:3248:4465], sessionId# [0:0:0] { items { uint32_value: 6 } items { uint32_value: 600 } }, { items { uint32_value: 7 } items { uint32_value: 70 } }, { items { uint32_value: 8 } items { uint32_value: 80 } }, { items { uint32_value: 9 } items { uint32_value: 90 } } >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl >> TOlap::CreateTableWithNullableKeys [GOOD] >> TOlap::CustomDefaultPresets >> KqpBatchUpdate::ManyPartitions_2 [GOOD] >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes |94.1%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] >> TStreamingQueryTest::CreateStreamingQuery >> TOlap::CreateTableTtl [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [GOOD] >> TOlap::CustomDefaultPresets [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplace >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpLimits::QSReplySize-useSink [GOOD] >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:04.710828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:04.710919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:04.711020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:04.711071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:04.711119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:04.711166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:04.711217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:04.711273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:04.711342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:04.711419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:04.712392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:04.712754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:04.807260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:04.807329Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:04.820273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:04.820881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 
2026-02-19T08:27:04.821122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:04.833475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:04.834238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:04.835036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:04.835405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:04.838920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:04.839126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:04.840506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:04.840585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:04.840702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:04.840753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:04.840821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:04.841024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:04.991636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.992853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.992996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.993937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.994034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:04.994108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... dId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000042 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 
MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } IsRestore: false } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2026-02-19T08:27:09.132888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:27:09.133184Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:574: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:27:09.133565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 38], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-02-19T08:27:09.133618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 0 2026-02-19T08:27:09.133654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 106:0 type: TxCreateColumnTable target path: [OwnerId: 72057594046678944, LocalPathId: 43] source path: 2026-02-19T08:27:09.133892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-02-19T08:27:09.134142Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:27:09.134188Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:725) 2026-02-19T08:27:09.134311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 
2026-02-19T08:27:09.134373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-02-19T08:27:09.136372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 43, at schemeshard: 72057594046678944 2026-02-19T08:27:09.136574Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2026-02-19T08:27:09.136803Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:09.136848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:27:09.137047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-02-19T08:27:09.137132Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:09.137167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:214:2213], at schemeshard: 72057594046678944, txId: 106, path id: 38 2026-02-19T08:27:09.137206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:214:2213], at schemeshard: 72057594046678944, txId: 106, path id: 43 2026-02-19T08:27:09.137497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:27:09.137541Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:237: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2026-02-19T08:27:09.137673Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:323: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2026-02-19T08:27:09.138365Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:27:09.138448Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:27:09.138482Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-02-19T08:27:09.138515Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2026-02-19T08:27:09.138553Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-02-19T08:27:09.139706Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:27:09.139768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:27:09.139791Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-02-19T08:27:09.139814Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 1 2026-02-19T08:27:09.139856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-02-19T08:27:09.139937Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2026-02-19T08:27:09.140822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2026-02-19T08:27:09.140948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2026-02-19T08:27:09.141935Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;self_id=[3:719:2703];ev=NActors::IEventHandle;tablet_id=72075186233409546;tx_id=106;this=136442244125280;method=TTxController::StartProposeOnExecute;tx_info=106:TX_KIND_SCHEMA;min=5000043;max=18446744073709551615;plan=0;src=[3:129:2153];cookie=12:5;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-02-19T08:27:09.142571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:27:09.143327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:05.193274Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:05.193353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.193438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:05.193482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:05.193546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:05.193588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:05.193651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:05.193686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:05.193768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.193838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:05.194730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:05.195055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:05.290422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:05.290479Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:05.302629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:05.303299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:05.303519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:05.314939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:05.315645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:05.316390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:05.316710Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:05.319860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.320060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:05.321250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:05.321315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.321448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:05.321507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:05.321560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:05.321758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:05.481864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.483958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: 
ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.484045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.484117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... _SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2210], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-02-19T08:27:09.057063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:09.057121Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:27:09.057187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-02-19T08:27:09.057966Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:27:09.058076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:27:09.058120Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:27:09.058167Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:27:09.058217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:27:09.058769Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:27:09.058845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:27:09.058876Z node 3 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:27:09.058907Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:27:09.058938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:27:09.059005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:27:09.060482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-02-19T08:27:09.060561Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2026-02-19T08:27:09.060643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2026-02-19T08:27:09.061392Z node 3 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2026-02-19T08:27:09.061503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6496: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2026-02-19T08:27:09.061638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2026-02-19T08:27:09.062579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:27:09.062667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:27:09.063777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:09.076015Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6670: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2026-02-19T08:27:09.076089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:27:09.076237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:27:09.078053Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:09.078203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:09.078247Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:27:09.078375Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:27:09.078414Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:09.078458Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:27:09.078495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:09.078535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:27:09.078625Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:747:2722] message: TxId: 102 2026-02-19T08:27:09.078683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:09.078730Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:27:09.078767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:27:09.078909Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:27:09.080596Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:27:09.080649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:809:2776] TestWaitNotification: OK eventTxId 102 2026-02-19T08:27:09.081188Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:09.081492Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 333us result status StatusSuccess 2026-02-19T08:27:09.082038Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } IsRestore: false } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [GOOD] >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] >> TStreamingQueryTest::CreateStreamingQuery [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists >> TOlap::AlterTtl [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplace [GOOD] >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:05.390027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:05.390110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.390174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:05.390222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:05.390281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:05.390319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:05.390374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:05.390401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:05.390459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.390524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:05.391355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:05.391659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:05.481327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:05.481398Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:05.493672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:05.494390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:05.494600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:05.506754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:05.507476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:05.508160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:05.508431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:05.511309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.511480Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:05.512398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:05.512448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.512537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:05.512575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:05.512609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:05.512728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:05.663341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.664539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.664724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.664806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.664965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.665889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... ansactionResult> complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:27:10.377541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:27:10.377602Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:149: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2026-02-19T08:27:10.377686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2026-02-19T08:27:10.377853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 5000042 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:27:10.379419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2026-02-19T08:27:10.379556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000043 2026-02-19T08:27:10.379893Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:10.380011Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 12884904049 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:10.380071Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:110: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000043 2026-02-19T08:27:10.380888Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 106:0 Propose -> ProposedWaitParts 2026-02-19T08:27:10.381137Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:27:10.381206Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:27:10.382028Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=column_engine_logs.cpp:287;event=double_schema_version;v=1; 2026-02-19T08:27:10.396281Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:10.396344Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:27:10.396553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:27:10.396713Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:10.396762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:214:2213], at schemeshard: 72057594046678944, txId: 106, path id: 38 2026-02-19T08:27:10.396808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:214:2213], at schemeshard: 72057594046678944, txId: 106, path id: 39 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000043 2026-02-19T08:27:10.397219Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:27:10.397271Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:200: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:27:10.397354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: alter_table.cpp:223: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-02-19T08:27:10.398788Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:27:10.398916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:27:10.398955Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-02-19T08:27:10.398994Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-02-19T08:27:10.399038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:27:10.399758Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:27:10.399830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2026-02-19T08:27:10.399855Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-02-19T08:27:10.399880Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 14 2026-02-19T08:27:10.399925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:27:10.399995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2026-02-19T08:27:10.404824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-02-19T08:27:10.406301Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:27:10.407203Z node 3 :TX_TIERING ERROR: log.cpp:841: fline=manager.cpp:170;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2026-02-19T08:27:10.407481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-02-19T08:27:10.420007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6670: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2026-02-19T08:27:10.420079Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2026-02-19T08:27:10.420220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2026-02-19T08:27:10.422406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:27:10.422577Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:27:10.422623Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 106:0 ProgressState 2026-02-19T08:27:10.422756Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:27:10.422796Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 
2026-02-19T08:27:10.422841Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:27:10.422878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:27:10.422912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-02-19T08:27:10.422999Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:748:2723] message: TxId: 106 2026-02-19T08:27:10.423050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:27:10.423089Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-02-19T08:27:10.423127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 106:0 2026-02-19T08:27:10.423271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:27:10.425187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:27:10.425247Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:952:2918] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] >> TOlapNaming::AlterColumnTableFailed [GOOD] >> TOlapNaming::AlterColumnStoreOk >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::TestTabletIsDead >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] >> TOlapNaming::CreateColumnTableExtraSymbolsOk [GOOD] >> TOlapNaming::CreateColumnStoreOk >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] Test command err: 2026-02-19T08:24:35.987423Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489361770164397:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:35.987493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:36.190584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:36.190663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:36.211695Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:36.229381Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:36.230572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:36.230871Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489361770164367:2081] 1771489475985899 != 1771489475985902 2026-02-19T08:24:36.329374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:36.329414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:36.329428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:36.329568Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:36.399471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:39.078404Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006b0/r3tmp/spilling-tmp-runner/node_1_9ccfe896-71206184-26407943-e5bf7614, actor: [1:7608489378950035403:2385] 2026-02-19T08:24:39.078567Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006b0/r3tmp/spilling-tmp-runner 2026-02-19T08:24:41.160082Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160126Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160146Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160165Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160183Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160221Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160254Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160276Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160299Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160329Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160357Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:41.160401Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01khtg0k4md0ckpf6w4w1h1fy7, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTUyZmVlZDItNDBjYjU5OTctOGZjOTM2Y2MtNTIwODg0ZDk=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:41.160534Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489387539971075:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg0k4md0ckpf6w4w1h1fy7, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTUyZmVlZDItNDBjYjU5OTctOGZjOTM2Y2MtNTIwODg0ZDk=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:41.160558Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489387539971075:2402] TxId: 281474976710672. Ctx: { TraceId: 01khtg0k4md0ckpf6w4w1h1fy7, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTUyZmVlZDItNDBjYjU5OTctOGZjOTM2Y2MtNTIwODg0ZDk=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:41.160832Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:41.160994Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161057Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161110Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161174Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161261Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161302Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161369Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161413Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161486Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:41.161535Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... /3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [12:7608490013441269512:3158], CA [12:7608490013441269509:3155], CA [12:7608490013441269510:3156], CA [12:7608490013441269513:3159], CA [12:7608490013441269511:3157], trace_id# 2026-02-19T08:27:07.149903Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490013441269510:3156] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1769 DurationUs: 1000 Tasks { TaskId: 2 CpuTimeUs: 940 FinishTimeMs: 1771489627148 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 130 ReadBytes: 1040 AffectedPartitions: 13 } IngressRows: 130 ComputeCpuTimeUs: 257 BuildCpuTimeUs: 683 WaitOutputTimeUs: 1704 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489627147 CreateTimeMs: 1771489627135 UpdateTimeMs: 1771489627148 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:07.150018Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490013441269510:3156] 2026-02-19T08:27:07.150105Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [12:7608490013441269512:3158], CA [12:7608490013441269509:3155], CA [12:7608490013441269513:3159], CA [12:7608490013441269511:3157], trace_id# 2026-02-19T08:27:07.150166Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. 
Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [12:7608490013441269512:3158], CA [12:7608490013441269509:3155], CA [12:7608490013441269513:3159], CA [12:7608490013441269511:3157], trace_id# 2026-02-19T08:27:07.150448Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490013441269511:3157] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1070 DurationUs: 1000 Tasks { TaskId: 3 CpuTimeUs: 343 FinishTimeMs: 1771489627148 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 120 ReadBytes: 960 AffectedPartitions: 12 } IngressRows: 120 ComputeCpuTimeUs: 285 BuildCpuTimeUs: 58 WaitOutputTimeUs: 1828 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489627147 CreateTimeMs: 1771489627136 UpdateTimeMs: 1771489627148 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:07.150499Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490013441269511:3157] 2026-02-19T08:27:07.150551Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [12:7608490013441269512:3158], CA [12:7608490013441269509:3155], CA [12:7608490013441269513:3159], trace_id# 2026-02-19T08:27:07.150589Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [12:7608490013441269512:3158], CA [12:7608490013441269509:3155], CA [12:7608490013441269513:3159], trace_id# 2026-02-19T08:27:07.150710Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490013441269512:3158] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 750 DurationUs: 1000 Tasks { TaskId: 4 CpuTimeUs: 191 FinishTimeMs: 1771489627148 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 119 ReadBytes: 952 AffectedPartitions: 12 } IngressRows: 119 ComputeCpuTimeUs: 151 BuildCpuTimeUs: 40 WaitOutputTimeUs: 2013 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489627147 CreateTimeMs: 1771489627137 UpdateTimeMs: 1771489627148 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:07.150736Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490013441269512:3158] 2026-02-19T08:27:07.150758Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [12:7608490013441269509:3155], CA [12:7608490013441269513:3159], trace_id# 2026-02-19T08:27:07.150779Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [12:7608490013441269509:3155], CA [12:7608490013441269513:3159], trace_id# 2026-02-19T08:27:07.150867Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490013441269509:3155] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2029 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 1025 FinishTimeMs: 1771489627149 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 130 ReadBytes: 1040 AffectedPartitions: 13 } IngressRows: 130 ComputeCpuTimeUs: 293 BuildCpuTimeUs: 732 WaitOutputTimeUs: 1341 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489627148 CreateTimeMs: 1771489627135 UpdateTimeMs: 1771489627149 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:07.150880Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490013441269509:3155] 2026-02-19T08:27:07.150897Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [12:7608490013441269513:3159], trace_id# 2026-02-19T08:27:07.150917Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [12:7608490013441269513:3159], trace_id# 2026-02-19T08:27:07.150989Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490013441269513:3159] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 4595 DurationUs: 2000 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 285 FinishTimeMs: 1771489627150 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 193 BuildCpuTimeUs: 92 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489627148 CreateTimeMs: 1771489627137 UpdateTimeMs: 1771489627150 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:07.151003Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490013441269513:3159] 2026-02-19T08:27:07.151115Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:27:07.151160Z node 12 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:27:07.151196Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [12:7608490013441269502:2543] TxId: 281474976710875. Ctx: { TraceId: 01khtg5298frxdex3k3a3qd3z0, Database: /Root, SessionId: ydb://session/3?node_id=12&id=ZDJhYzdiMjktNGQ1ZmU0MTctN2M4YWJkZjktMTcyMWU4MDc=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.010213s ReadRows: 499 ReadBytes: 3992 RequestUnits# 499 ForceFlag# true trace_id# >> TStreamingQueryTest::ParallelAlterStreamingQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:09.628558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:09.628655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:09.628733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:09.628772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:09.628818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:09.628871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:09.628900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:09.628927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:09.629007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:09.629080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:09.630328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:09.630631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:09.722402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:09.722475Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:09.740357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:09.742236Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:09.742488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:09.759972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:09.760697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:09.761705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:09.762105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:09.765783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:09.766041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:09.767484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:09.767558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:09.767684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:09.767750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:09.767797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:09.767978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:09.933499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.934570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.934699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.934802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.934933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:09.935694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
FO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:27:11.714410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:11.714488Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:27:11.714537Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:27:11.716276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:11.716331Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:11.716381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:11.716435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:27:11.716577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:27:11.718185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:27:11.718332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:27:11.718654Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:11.718784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:11.718838Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:11.719102Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:27:11.719162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:11.719358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:11.719448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:27:11.721259Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:11.721308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:11.721541Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:11.721586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:27:11.721941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:11.721996Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:27:11.722102Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:27:11.722138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:11.722180Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:27:11.722216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:11.722255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:27:11.722298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:11.722333Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:27:11.722366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:27:11.722446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:11.722490Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:27:11.722526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:27:11.723128Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 
2026-02-19T08:27:11.723250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:27:11.723293Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:27:11.723336Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:27:11.723382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:11.723470Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:27:11.726564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:27:11.727050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:27:11.728196Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2669] Bootstrap 2026-02-19T08:27:11.729354Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2669] Become StateWork (SchemeCache [2:685:2674]) 2026-02-19T08:27:11.731606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:27:11.731795Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2026-02-19T08:27:11.731927Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-02-19T08:27:11.733020Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:27:11.735503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:11.735722Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: 
StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER STREAMING QUERY, path: MyStreamingQuery 2026-02-19T08:27:11.736351Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:27:11.736554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:27:11.736599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:27:11.736921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:27:11.737003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:27:11.737074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:695:2684] TestWaitNotification: OK eventTxId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:09.773463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:09.773542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:09.773601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:09.773647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:09.773678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:09.773705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:09.773749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:09.773769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:09.773831Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:09.773895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:09.774980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:09.775226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:09.845475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:09.845539Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:09.858264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:09.859158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:09.859378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:09.872014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:09.874549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:09.875117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:09.875402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:09.879477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:09.879688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:09.881044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:09.881105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:09.881240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:09.881287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:09.881345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:09.881513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:10.033178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.034969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.035056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.035113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.035181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:10.035265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:27:11.839379Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:27:11.839418Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-02-19T08:27:11.839465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:11.840047Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:27:11.840121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:27:11.840150Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:27:11.840178Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:27:11.840226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:27:11.840305Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:27:11.843181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:27:11.843320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:27:11.843519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:27:11.843562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:27:11.843895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:27:11.843974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:27:11.844012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:708:2697] TestWaitNotification: OK eventTxId 101 2026-02-19T08:27:11.844437Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:11.844638Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 234us result status StatusSuccess 2026-02-19T08:27:11.845016Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-02-19T08:27:11.847551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:27:11.847890Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 102:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } 2026-02-19T08:27:11.847972Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 102:0, path# /MyRoot/MyStreamingQuery 2026-02-19T08:27:11.848147Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:27:11.850343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-02-19T08:27:11.850550Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: MyStreamingQuery TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:27:11.850834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:27:11.850879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:27:11.851216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:27:11.851307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:27:11.851344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:716:2705] TestWaitNotification: OK eventTxId 102 2026-02-19T08:27:11.851771Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:11.851959Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 214us result status StatusSuccess 2026-02-19T08:27:11.852342Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit95 >> TOlapNaming::CreateColumnTableOk [GOOD] >> TOlapNaming::CreateColumnTableFailed >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> TOlapNaming::CreateColumnStoreOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] Test command err: 2026-02-19T08:22:40.311223Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488867816990700:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.311567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:40.346251Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:22:40.548657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:40.548778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:40.588842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:40.591496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:40.634554Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:40.726366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f42/r3tmp/yandexdl7WfJ.tmp 2026-02-19T08:22:40.726411Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f42/r3tmp/yandexdl7WfJ.tmp 2026-02-19T08:22:40.726543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f42/r3tmp/yandexdl7WfJ.tmp 2026-02-19T08:22:40.726641Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:40.749578Z INFO: TTestServer started on Port 13946 GrpcPort 29153 2026-02-19T08:22:40.871646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:29153 2026-02-19T08:22:40.940634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.016915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:41.166055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-02-19T08:22:41.316770Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:43.391396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488880701893489:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.391401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488880701893479:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.391548Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.391974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488880701893510:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.392054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.396185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:43.407958Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488880701893509:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-02-19T08:22:43.607983Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488880701893575:2643] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:43.653065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.691175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.769973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.782970Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488880701893583:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:43.783993Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=M2IxYWFjMTctYzgzZjRiNDMtYjcyZDk4NDAtOGI1YzkwZDk=, ActorId: [1:7608488880701893477:2335], ActorState: ExecuteState, LegacyTraceId: 01khtfx1d9dv3mrrws6adk4e5n, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:43.785996Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608488884996861161:2821] 2026-02-19T08:22:45.313842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488867816990700:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.313913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-02-19T08:22:49.165154Z :WriteToTopic_Demo_40_Table INFO: TTopicSdkTestSetup started 2026-02-19T08:22:49.192762Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:22:49.213154Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608488906471697826:2910] connected; active server actors: 1 2026-02-19T08:22:49.213357Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-02-19T08:22:49.222953Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-02-19T08:22:49.223120Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-02-19T08:22:49.236212Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:49.237384Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:22:49.237601Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-02-19T08:22:49.237631Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:22:49.237664Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72075186224037892] doesn't have tx writes info 2026-02-19T08:22:49.240609Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72075186224037892] server connected, pipe [1:7608488906471697825:2909], ... on.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.586174Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:27:08.621833Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:08.621874Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.621892Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:08.621910Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.621925Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:08.689832Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:08.689874Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.689893Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:08.689913Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.689929Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:27:08.724942Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:08.724985Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.725000Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:08.725018Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.725032Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:08.789861Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:08.789909Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.789928Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:08.789943Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.789955Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:27:08.825219Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:08.825257Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.825275Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:08.825293Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.825308Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:08.890702Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:08.890743Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.890760Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:08.890778Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.890802Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:27:08.925574Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:08.925618Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.925635Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:08.925655Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.925668Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:08.991131Z node 15 :PERSQUEUE DEBUG: 
partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:08.991179Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.991196Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:08.991215Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:08.991230Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:27:09.025923Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:09.025973Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:09.025998Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:09.026027Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:09.026046Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:09.109384Z :INFO: [/Root] [/Root] [b2df99da-5609e259-d2fc5f05-b6f87af4] Closing read session. Close timeout: 0.000000s 2026-02-19T08:27:09.109449Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:319:320 2026-02-19T08:27:09.113635Z :INFO: [/Root] [/Root] [b2df99da-5609e259-d2fc5f05-b6f87af4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2001 BytesRead: 20480000 MessagesRead: 320 BytesReadCompressed: 20480000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:27:09.113746Z :NOTICE: [/Root] [/Root] [b2df99da-5609e259-d2fc5f05-b6f87af4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:27:09.113802Z :DEBUG: [/Root] [/Root] [b2df99da-5609e259-d2fc5f05-b6f87af4] [] Abort session to cluster 2026-02-19T08:27:09.114323Z :NOTICE: [/Root] [/Root] [b2df99da-5609e259-d2fc5f05-b6f87af4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:27:10.574084Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:10.574120Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:10.574137Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:10.574157Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:10.574170Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:27:10.574478Z node 15 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:27:10.574621Z node 15 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_15_1_8375990184185545882_v1 grpc read done: success# 0, data# { } 2026-02-19T08:27:10.574636Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_15_1_8375990184185545882_v1 grpc read failed 2026-02-19T08:27:10.574660Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_15_1_8375990184185545882_v1 grpc closed 2026-02-19T08:27:10.574694Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_15_1_8375990184185545882_v1 is DEAD 2026-02-19T08:27:10.575254Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:27:10.575270Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:10.575281Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:27:10.575296Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:27:10.575304Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:27:10.576510Z node 15 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [15:7608490013605754532:2523] disconnected. 
2026-02-19T08:27:10.576528Z node 15 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [15:7608490013605754532:2523] disconnected; active server actors: 1 2026-02-19T08:27:10.576541Z node 15 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [15:7608490013605754532:2523] client test-consumer disconnected session test-consumer_15_1_8375990184185545882_v1 2026-02-19T08:27:10.576619Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037894] Destroy direct read session test-consumer_15_1_8375990184185545882_v1 2026-02-19T08:27:10.576644Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608490013605754535:2526] destroyed 2026-02-19T08:27:10.576672Z node 15 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_15_1_8375990184185545882_v1 >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict [GOOD] >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors >> TOlapNaming::AlterColumnStoreOk [GOOD] >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> TOlapNaming::CreateColumnTableFailed [GOOD] >> TStreamingQueryTest::ParallelCreateSameStreamingQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:04.960208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:04.960301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:04.960404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:04.960444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:04.960491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:04.960534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:04.960577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:04.960611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, 
BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:04.960663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:04.960723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:04.961540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:04.961883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:05.035427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:05.035470Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:05.047007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:05.047760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:05.047977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:05.063315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:05.063965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:05.064652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:05.064978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:05.067881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.068073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:05.069194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:05.069251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.069353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:05.069438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:05.069483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:05.069665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:05.232059Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.233920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.234017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.234087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
05: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:27:13.133390Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 2026-02-19T08:27:13.134739Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:13.134805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:13.135001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:27:13.135143Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:13.135190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:27:13.135236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:27:13.135293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.135349Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_store.cpp:246: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:27:13.135415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_store.cpp:269: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-02-19T08:27:13.136578Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:27:13.136681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:27:13.136728Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:27:13.136771Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:27:13.136814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:13.137737Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:27:13.137813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:27:13.137841Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:27:13.137869Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:27:13.137898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:27:13.137975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-02-19T08:27:13.140014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-02-19T08:27:13.141202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:27:13.141611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:27:13.153610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6670: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-02-19T08:27:13.153676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:27:13.153820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:27:13.155551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.155702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.155744Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:27:13.155864Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:27:13.155903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:27:13.155947Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:27:13.155983Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:27:13.156021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:27:13.156091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:751:2727] message: TxId: 101 2026-02-19T08:27:13.156141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:27:13.156181Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:27:13.156217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:27:13.156350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:27:13.158073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.158125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:752:2728] TestWaitNotification: OK eventTxId 101 2026-02-19T08:27:13.158569Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:13.158797Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 254us result status StatusSuccess 2026-02-19T08:27:13.159494Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 
PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TOlapNaming::AlterColumnTableOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:12.598045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:12.598153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:12.598246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:12.598305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:12.598344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:12.598385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:12.598420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:12.598453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:12.598547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:12.598637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:12.599562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:12.599892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:12.672790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:12.672851Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:12.683666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:12.684348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:12.684519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:12.696060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:12.696685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:12.697328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:12.697646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:12.700709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:12.700888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:12.702139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:12.702220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:12.702340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:12.702390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:12.702434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:12.702583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:12.833097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.834376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.834532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.834632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.834768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.834859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.834936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.835037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.835139Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.835214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.835284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.835348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.835445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.835533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
transaction, txId: 131, at schemeshard: 72057594046678944 2026-02-19T08:27:13.502979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.503002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.503068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:27:13.503125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:27:13.503154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.503168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.503273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.503289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.503386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:27:13.503443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-02-19T08:27:13.503515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.503556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.503646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:27:13.503733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.503753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.503854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-02-19T08:27:13.503936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-02-19T08:27:13.503985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.504002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.504111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-02-19T08:27:13.504166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 
2026-02-19T08:27:13.504189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.504258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2026-02-19T08:27:13.504283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.504295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.504328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.504344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.504419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2026-02-19T08:27:13.504497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.504519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.504556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-02-19T08:27:13.504621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.504646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.504727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.504740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.504806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 113, at schemeshard: 72057594046678944 2026-02-19T08:27:13.504840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.504854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.504912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.504937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.505034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.505051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.505156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 110: got 
EvNotifyTxCompletionResult 2026-02-19T08:27:13.505170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.505256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.505274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.505351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.505376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:789:2777] 2026-02-19T08:27:13.505467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.505487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:789:2777] TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 2026-02-19T08:27:13.508469Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:13.508751Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 278us result status StatusSuccess 2026-02-19T08:27:13.509221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] >> TStreamingQueryTest::ParallelCreateStreamingQuery >> TStreamingQueryTest::DropStreamingQueryTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2026-02-19T08:27:04.769568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:04.769659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:04.769740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:04.769777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:04.769821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:04.769860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:04.769905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:04.770023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: 
ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:04.770083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:04.770164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:04.771071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:04.771385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:04.862039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:04.862117Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:04.867515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:04.867718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:04.867852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:04.870991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:04.871202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:04.871868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:04.872106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:04.874052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:04.874250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:04.875114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:04.875170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:04.875524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:04.875588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:04.875638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:04.875810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-02-19T08:27:05.033689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.034725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.034857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.034925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.034993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.035682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ate for txid 103:0 Propose -> ProposedWaitParts 2026-02-19T08:27:13.384878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:13.384940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:27:13.386219Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 UpsertColumns { Id: 3 Name: "comment" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:27:13.386343Z node 2 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:257;event=new_schema;snapshot=plan_step=5000040;tx_id=103;;switch_optimizer=0;switch_accessors=1; FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2026-02-19T08:27:13.388111Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:13.388169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:13.388370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:27:13.388548Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:13.388592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-02-19T08:27:13.388638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:27:13.389293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.389372Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:27:13.389439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-02-19T08:27:13.389523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:27:13.390250Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 
2026-02-19T08:27:13.390357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:27:13.390405Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:27:13.390442Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:27:13.390483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:13.391722Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:27:13.391800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:27:13.391830Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:27:13.391857Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-02-19T08:27:13.391903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:27:13.393863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-02-19T08:27:13.394115Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:13.394152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:27:13.394924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:27:13.395867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:27:13.395923Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:13.395970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 
2026-02-19T08:27:13.396419Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:27:13.396495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:27:13.396523Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:27:13.396555Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-02-19T08:27:13.396586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:27:13.396661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:27:13.397990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:27:13.410171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6670: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2026-02-19T08:27:13.410235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2026-02-19T08:27:13.410347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2026-02-19T08:27:13.410409Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 ProposedWaitParts -> Done FAKE_COORDINATOR: Erasing txId 103 2026-02-19T08:27:13.412211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.412357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.412396Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:27:13.412521Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:27:13.412559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:27:13.412594Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:27:13.412626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:27:13.412662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, 
TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:27:13.412717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:751:2727] message: TxId: 103 2026-02-19T08:27:13.412768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:27:13.412808Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:27:13.412843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:27:13.412955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:27:13.414546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.414599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:854:2822] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:05.630795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:05.630889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.630974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:05.631023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:05.631075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:05.631112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:05.631172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:05.631205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:05.631270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-02-19T08:27:05.631358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:05.632253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:05.632583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:05.723390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:05.723448Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:05.735800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:05.736466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:05.736682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:05.748564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:05.749323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:05.750127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:05.750459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:05.753725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.753930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:05.755182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:05.755252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.755371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:05.755417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:05.755467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:05.755665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:05.909643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.910695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.910841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.910946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.911841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:27:13.854019Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:13.854151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:13.854197Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:13.854451Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:27:13.854504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:13.854685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:13.854758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:27:13.856442Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:13.856493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:13.856707Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:13.856751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:27:13.857099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.857152Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:27:13.857260Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:27:13.857312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:13.857357Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:27:13.857415Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:13.857456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:27:13.857499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:13.857538Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:27:13.857573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:27:13.857642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:13.857680Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:27:13.857738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:27:13.858274Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:27:13.858374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:27:13.858418Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:27:13.858461Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:27:13.858505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:13.858592Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:27:13.861037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:27:13.861523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:27:13.862356Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2669] Bootstrap 2026-02-19T08:27:13.863568Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2669] Become StateWork (SchemeCache [2:685:2674]) 2026-02-19T08:27:13.865536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { 
Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:27:13.865821Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:574: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.865982Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2026-02-19T08:27:13.866795Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:27:13.868576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:13.868753Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-02-19T08:27:13.869363Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:27:13.869565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:27:13.869609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:27:13.869958Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:27:13.870042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.870086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:695:2684] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-02-19T08:27:13.872404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:27:13.872659Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:574: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.872841Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 2026-02-19T08:27:13.874722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: 
StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:13.874896Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:27:13.875125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:27:13.875157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:27:13.875440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:27:13.875509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.875538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:702:2691] TestWaitNotification: OK eventTxId 102 >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:12.632511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:12.632645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:12.632726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:12.632764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:12.632798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:12.632835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:12.632868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:12.632898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 
0.100000s 2026-02-19T08:27:12.632982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:12.633058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:12.634011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:12.634357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:12.718374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:12.718463Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:12.731669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:12.732366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:12.732576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:12.744812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:12.745581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:12.746522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:12.746882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:12.750309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:12.750519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:12.751966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:12.752041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:12.752166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:12.752215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:12.752263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:12.752450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:12.896402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.897937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:12.898002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
2026-02-19T08:27:14.427189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:14.427338Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:27:14.427521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:14.427568Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:27:14.427605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:27:14.427649Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:27:14.429282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:14.429354Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:27:14.429398Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:27:14.430823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:14.430866Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:14.430909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:14.430955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:27:14.431078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:27:14.432380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:27:14.432498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:27:14.432769Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 
5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:14.432879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:14.432918Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:14.433163Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:27:14.433215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:14.433387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:14.433456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:27:14.435049Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:14.435094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:14.435274Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:14.435314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:27:14.435600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:14.435641Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:27:14.435731Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:27:14.435769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:14.435808Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:27:14.435842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:14.435879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:27:14.435917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:14.435956Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:27:14.435986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:27:14.436049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:14.436088Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:27:14.436120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:27:14.436632Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:27:14.436741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:27:14.436805Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:27:14.436840Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:27:14.436876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:14.436957Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:27:14.439726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:27:14.440139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:27:14.440588Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2669] Bootstrap 2026-02-19T08:27:14.441608Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2669] Become StateWork (SchemeCache [2:685:2674]) 2026-02-19T08:27:14.443632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:27:14.443793Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 101:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } 2026-02-19T08:27:14.443869Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] 
TCreateStreamingQuery Propose: opId# 101:0, path# /MyRoot/ 2026-02-19T08:27:14.443953Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-02-19T08:27:14.444936Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:27:14.447167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:14.447357Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE STREAMING QUERY, path: 2026-02-19T08:27:14.447920Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101
>> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD]
>> TStreamingQueryTest::CreateStreamingQueryWithProperties
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableOk [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:05.545876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:05.545967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.546041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:05.546091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:05.546146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:05.546192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:05.546236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:05.546263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, 
BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:05.546322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.546391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:05.547235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:05.547551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:05.635543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:05.635593Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:05.648288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:05.648978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:05.649190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:05.660865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:05.661533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:05.662256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:05.662575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:05.665571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.665786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:05.666911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:05.666974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.667072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:05.667126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:05.667170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:05.667331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:05.812065Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.813944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.814010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.814095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.814163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
-19T08:27:13.916081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.916187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.920520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.920658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.920714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.920800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.920869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.920930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.920998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.921046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.921848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.921966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.922040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.922104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.922172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.922227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.922288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.922338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.934785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.935029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 
2026-02-19T08:27:13.935146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.935250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.935342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.935456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.935571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.935721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.942951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.943140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.943232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.943357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.943456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.943558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.943671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.943770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.944784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.944911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.944994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.945081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.945166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.945247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 
2026-02-19T08:27:13.945338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.945423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.945557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:13.945611Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:27:13.945761Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:27:13.945802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:13.945844Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:27:13.945885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:13.945933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:27:13.946022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:3133:4351] message: TxId: 102 2026-02-19T08:27:13.946075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:13.946143Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:27:13.946180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:27:13.947800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-02-19T08:27:13.952878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:27:13.952949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:4048:5205] TestWaitNotification: OK eventTxId 102
>> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:14.032410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:14.032482Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:14.032525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:14.032559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:14.032598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:14.032626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:14.032643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:14.032664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:14.032728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:14.032787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:14.033446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:14.033724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:14.117556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:14.117594Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:14.126718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:14.127382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:14.127555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:14.136477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:14.137108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:14.137652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:14.137917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:14.140663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2026-02-19T08:27:14.140832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:14.141854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:14.141897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:14.141986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:14.142024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:14.142052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:14.142156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:14.283070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.283977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:27:14.284389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.284993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... meshard: 72057594046678944 2026-02-19T08:27:15.212931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2026-02-19T08:27:15.213058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:27:15.213835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:27:15.213924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:27:15.213961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:27:15.214001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:27:15.214036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-02-19T08:27:15.214773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:27:15.214861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:27:15.214890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:27:15.214919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 2 2026-02-19T08:27:15.214948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:27:15.215005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-02-19T08:27:15.217294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2026-02-19T08:27:15.217454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 2026-02-19T08:27:15.219258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:27:15.234336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:27:15.234592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:15.234690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:15.234737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:35: MkDir::TPropose operationId# 105:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000040, at schemeshard: 72057594046678944 2026-02-19T08:27:15.234863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 105:0 Propose -> Done 2026-02-19T08:27:15.235008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:27:15.235052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 FAKE_COORDINATOR: Erasing txId 105 2026-02-19T08:27:15.236929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:15.236990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:15.237165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-02-19T08:27:15.237246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:15.237292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard 
Send, to populator: [1:857:2811], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-02-19T08:27:15.237328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:857:2811], at schemeshard: 72057594046678944, txId: 105, path id: 40 2026-02-19T08:27:15.237559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-02-19T08:27:15.237606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 105:0 ProgressState 2026-02-19T08:27:15.237698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-02-19T08:27:15.237762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:27:15.237793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-02-19T08:27:15.237815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:27:15.237849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-02-19T08:27:15.237880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-02-19T08:27:15.237914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-02-19T08:27:15.237944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 105:0 2026-02-19T08:27:15.238008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:27:15.238047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2026-02-19T08:27:15.238078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2026-02-19T08:27:15.238107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-02-19T08:27:15.239139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:27:15.239225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:27:15.239256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:27:15.239296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 11 2026-02-19T08:27:15.239335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-02-19T08:27:15.241314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:27:15.241382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:27:15.241404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:27:15.241424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-02-19T08:27:15.241450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-02-19T08:27:15.241532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-02-19T08:27:15.243791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:27:15.244878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 >> TStreamingQueryTest::DropStreamingQueryTwice [GOOD] >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:14.683351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:14.683432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:14.683479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:14.683506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:14.683540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:14.683578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:14.683605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:14.683630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:14.683702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:14.683764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:14.684482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:14.684760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:14.751377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:14.751432Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:14.760675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:14.761311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:14.761482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:14.771099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:14.771853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:14.772603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:14.772902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:14.775999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:14.776223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:14.777459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:14.777519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:14.777636Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:14.777696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:14.777763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:14.777954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:14.889605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.890639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.890786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.890897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:14.891811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... escribe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:15.454120Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 245us result status StatusSuccess 2026-02-19T08:27:15.454475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:15.454987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:15.455162Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 184us result status StatusSuccess 2026-02-19T08:27:15.455433Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2026-02-19T08:27:15.455740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:27:15.455797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-02-19T08:27:15.455901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:27:15.455926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-02-19T08:27:15.455978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:27:15.456010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:27:15.456520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:27:15.456669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:27:15.456712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:709:2697] 2026-02-19T08:27:15.456898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:27:15.456996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:27:15.457055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:27:15.457082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:709:2697] 
2026-02-19T08:27:15.457207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:27:15.457235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:709:2697] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-02-19T08:27:15.457802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:15.458011Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 172us result status StatusSuccess 2026-02-19T08:27:15.458314Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2026-02-19T08:27:15.460799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "NilNoviSubLuna" } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:27:15.461044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 104:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "NilNoviSubLuna" } 2026-02-19T08:27:15.461128Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 104:0, path# /MyRoot/NilNoviSubLuna 2026-02-19T08:27:15.461266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:27:15.463666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-02-19T08:27:15.463929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: NilNoviSubLuna TestModificationResult got TxId: 104, wait until txId: 104 >> TStreamingQueryTest::CreateStreamingQueryWithProperties [GOOD] >> TStreamingQueryTest::DropStreamingQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:15.214716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:15.214832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:15.214885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:15.214913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:15.214942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:15.214982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:15.215007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:15.215026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:15.215083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:15.215181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:15.215918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:15.216146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:15.285666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:15.285740Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:15.294690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:15.295341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:15.295497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:15.305465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:15.306114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:15.306820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:15.307047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:15.309662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:15.309880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:15.310976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:15.311024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:15.311114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:15.311168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:15.311215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-02-19T08:27:15.311339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:15.413430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.414904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
th "/MyRoot/DirA/MyStreamingQuery1" took 206us result status StatusSuccess 2026-02-19T08:27:16.006188Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:16.006797Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:16.006958Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 138us result status StatusSuccess 2026-02-19T08:27:16.007164Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 
PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:16.007666Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:16.007830Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 122us result status StatusSuccess 2026-02-19T08:27:16.008155Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:16.008643Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:16.008806Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery1" took 171us result status StatusSuccess 2026-02-19T08:27:16.008995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:16.009407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:16.009558Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 170us result status StatusSuccess 2026-02-19T08:27:16.009813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: 2026-02-19T08:21:26.343386Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488550593035106:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:26.345087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:21:26.830942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:21:26.893641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:21:26.901950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:21:26.950985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:21:26.984271Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488550593035059:2081] 1771489286340878 != 1771489286340881 2026-02-19T08:21:27.001819Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:21:27.033133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:21:27.218256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:21:27.218274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:21:27.218281Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:21:27.218377Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:21:27.379494Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:21:27.806416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:21:27.924007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:28.114628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:28.313747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:28.439622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.520465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488567772907193:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.520606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.521215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488567772907203:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.521268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:30.868567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.931861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:30.990533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.065472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.122978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.216471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.306077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.350902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488550593035106:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:21:31.350986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:21:31.405019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:31.615386Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488572067875986:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.615519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.615890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488572067875991:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.615957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488572067875992:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.616105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:21:31.622158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:21:31.647454Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488572067875995:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:21:31.751689Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488572067876083:5432] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:21:33.400231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:21:41.804571Z no ... .cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:51.959622Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:52.026551Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:52.026582Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:52.026593Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:52.026718Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:52.118157Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:52.630403Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:52.649482Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:52.744457Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:52.880291Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:52.963286Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:53.056406Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:55.903110Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489961666752051:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:55.903203Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:55.903644Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489961666752061:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:55.903700Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:55.965537Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:56.006402Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:56.050327Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:56.088487Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:56.125483Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:56.168105Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:56.214152Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:56.287736Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:56.386160Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489965961720228:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:56.386244Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:56.386535Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489965961720234:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:56.386619Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608489965961720233:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:56.386700Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:56.390105Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:56.401544Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608489965961720237:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:26:56.492240Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608489965961720288:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:56.768340Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608489944486881055:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:56.768440Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:26:58.634737Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:06.885601Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:27:06.885643Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:08.945420Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=5&id=NTZlNTJkYjctNTNhMjdiOTMtNDM5MGNiMTQtZDczNjAyMjM=, ActorId: [5:7608490013206361735:2692], ActorState: ExecuteState, LegacyTraceId: 01khtg53ar6kv2s09wyv5kfsy8, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Intermediate data materialization exceeded size limit (88240890 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data." issue_code: 2013 severity: 1 } trace_id#
: Error: Intermediate data materialization exceeded size limit (88240890 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 >> TFlatTest::CopyTableAndRead >> TFlatTest::LargeProxyReply >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] >> TFlatTest::ShardUnfreezeNonFrozen >> TLocksTest::SetLockFail >> TStreamingQueryTest::DropStreamingQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:15.368996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:15.369083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:15.369140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:15.369179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:15.369215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:15.369249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:15.369294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:15.369320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:15.369414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:15.369481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:15.370359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:15.370634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:15.453059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console 
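The PRECONDITION_FAILED above (issue code 2013) is KQP's cap on intermediate result materialization: the query tried to materialize 88240890 bytes against the 50331648-byte limit, and the message itself recommends writing in smaller batches. Below is a minimal sketch of that batching pattern in Python; the chunk size, the row shape, and the upsert_rows placeholder are illustrative assumptions only, not part of this test and not a specific YDB SDK API.

# Minimal sketch: split a large write into smaller per-transaction batches so no
# single query has to materialize the whole dataset (YDB issue code 2013 above).
# upsert_rows is a hypothetical callback standing in for whatever SDK call or
# YQL statement actually performs the write; the 1000-row chunk size is arbitrary.

from typing import Iterator, List, Sequence


def chunked(rows: Sequence[dict], size: int) -> Iterator[List[dict]]:
    """Yield consecutive slices of at most `size` rows."""
    for start in range(0, len(rows), size):
        yield list(rows[start:start + size])


def upsert_rows(batch: List[dict]) -> None:
    # Placeholder for the real write (e.g. one UPSERT or bulk-upsert call per batch).
    print(f"would upsert {len(batch)} rows")


def write_in_batches(rows: Sequence[dict], batch_size: int = 1000) -> None:
    # Each batch is a separate, small query, so the intermediate data each query
    # materializes stays well under the 50331648-byte limit reported in the log.
    for batch in chunked(rows, batch_size):
        upsert_rows(batch)


if __name__ == "__main__":
    demo = [{"id": i, "payload": "x" * 100} for i in range(5000)]
    write_in_batches(demo)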
configs 2026-02-19T08:27:15.453111Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:15.463177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:15.463781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:15.463952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:15.472576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:15.473161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:15.473701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:15.474038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:15.476355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:15.476514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:15.477405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:15.477450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:15.477531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:15.477564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:15.477595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:15.477752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:15.596378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:15.597961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
teParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:27:17.193024Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:27:17.194846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:17.194909Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:27:17.194963Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:27:17.196438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:17.196483Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:17.196530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:17.196578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:27:17.196713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:27:17.198070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:27:17.198198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:27:17.198492Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:17.198600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:17.198644Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:27:17.198901Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:27:17.198951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2026-02-19T08:27:17.199121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:17.199192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:17.202535Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:17.202594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:17.202803Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:17.202849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:27:17.203206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:27:17.203256Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:27:17.203357Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:27:17.203393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:17.203432Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:27:17.203469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:17.203508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:27:17.203553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:27:17.203590Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:27:17.203625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:27:17.203700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:17.203741Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:27:17.203775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:27:17.204362Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:27:17.204465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:27:17.204505Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:27:17.204544Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:27:17.204590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:17.204681Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:27:17.207424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:27:17.207877Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:27:17.208334Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:680:2669] Bootstrap 2026-02-19T08:27:17.209451Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:680:2669] Become StateWork (SchemeCache [2:685:2674]) 2026-02-19T08:27:17.211601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropStreamingQuery Drop { Name: "MyStreamingQuery" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:27:17.211763Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_streaming_query.cpp:182: [72057594046678944] TDropStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2026-02-19T08:27:17.211881Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-02-19T08:27:17.213203Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:27:17.215570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:17.215791Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check 
failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP STREAMING QUERY, path: MyStreamingQuery 2026-02-19T08:27:17.216321Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:27:17.216517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:27:17.216565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:27:17.216893Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:27:17.216984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:27:17.217021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:695:2684] TestWaitNotification: OK eventTxId 101 >> TFlatTest::PathSorting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:15.989483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:15.989552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:15.989594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:15.989636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:15.989676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:15.989725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:15.989754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:15.989776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:15.989839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:15.989900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:15.990651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:15.990902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:16.059729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:16.059792Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:16.070506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:16.071185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:16.071403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:16.081947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:16.082580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:16.083111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:16.083363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:16.086224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:16.086402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:16.087493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:16.087551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:16.087663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:16.087726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:16.087769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:16.087918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:16.203365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.204348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.204478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.204578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.204707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.204774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.204873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.204983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.205088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.205161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.205231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.205301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.205369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:16.205444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-02-19T08:27:17.768592Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:17.768720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:17.768777Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_streaming_query.cpp:22: [72057594046678944] TDropStreamingQuery TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-02-19T08:27:17.768875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:27:17.768974Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> Done 2026-02-19T08:27:17.769159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:17.769227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:27:17.769937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:27:17.771404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:27:17.773231Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:17.773299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:17.773461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:27:17.773630Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:17.773669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:27:17.773735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:27:17.774078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:27:17.774132Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:27:17.774242Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:27:17.774279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:17.774319Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:27:17.774355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:17.774394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:27:17.774437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:27:17.774479Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:27:17.774525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:27:17.774640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:27:17.774687Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:27:17.774727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-02-19T08:27:17.774758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:27:17.775214Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:27:17.775299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:27:17.775332Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:27:17.775373Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:27:17.775435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:27:17.775751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:27:17.775809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:27:17.775909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:17.776251Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:27:17.776323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:27:17.776362Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:27:17.776404Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-02-19T08:27:17.776457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:27:17.776531Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:27:17.780946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:27:17.781093Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-02-19T08:27:17.781171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:27:17.781401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:27:17.781449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:27:17.781936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:27:17.782052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:27:17.782091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:738:2727] TestWaitNotification: OK eventTxId 102 2026-02-19T08:27:17.782573Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:17.782788Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 263us result status StatusPathDoesNotExist 2026-02-19T08:27:17.782959Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyStreamingQuery" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TObjectStorageListingTest::TestFilter >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::BridgeGroupDegradedInBothPiles >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsLimit999 >> TFlatTest::CopyCopiedTableAndRead |94.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TLocksTest::CK_GoodLock >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> TFlatTest::SplitEmptyAndWrite >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> TFlatTest::Ls >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageLimit87 >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> TLocksFatTest::PointSetNotBreak >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query [GOOD] >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards >> THealthCheckTest::TestNoSchemeShardResponse [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead >> TOlap::StoreStats [GOOD] >> TOlap::Decimal >> THealthCheckTest::TestTabletIsDead [GOOD] >> THealthCheckTest::TestStoppedTabletIsNotDead >> TFlatTest::CopyTableAndDropOriginal [GOOD] >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> TFlatTest::PartBloomFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:59.854724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:59.922273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:59.926432Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:59.926669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:59.926776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:26:00.211535Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:00.291281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:00.291368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:00.349149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:00.434571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:01.033882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:01.035004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:01.035056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:01.035091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:01.035518Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:01.098242Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:01.589641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:26:03.860465Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:03.866607Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:26:03.870875Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:03.896806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:26:03.896905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:03.934346Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:26:03.935825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:04.081790Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.082242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.082780Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.083546Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.083891Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.084060Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.084205Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.084405Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.084582Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:04.259727Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:26:04.283487Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:04.283570Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:04.296757Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:04.440723Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:04.490585Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:26:04.490675Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:26:04.516354Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:26:04.516525Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:26:04.516676Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:26:04.516716Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:04.516747Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:26:04.516797Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:04.516849Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:04.516899Z node 2 :STATISTICS DEBUG: tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:04.517543Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:04.600944Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:04.601046Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2181:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:04.615275Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2622] 2026-02-19T08:26:04.615873Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2622], schemeshard id = 72075186224037897 2026-02-19T08:26:04.723933Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2357:2697] 2026-02-19T08:26:04.724404Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:26:04.726079Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. Describe result: PathErrorUnknown 2026-02-19T08:26:04.726127Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. Creating table 2026-02-19T08:26:04.726192Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:26:04.729101Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2376:2704], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:04.732543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:04.738065Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:26:04.738189Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. Subscribe on create table tx: 281474976720657 2026-02-19T08:26:04.749376Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:26:04.931910Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:26:05.195369Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:26:05.281800Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:05.281880Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2359:2699] Owner: [2:2358:2698]. Column diff is empty, finishing 2026-02-19T08:26:05.907450Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... configs 2026-02-19T08:26:14.308004Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:21.430204Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-02-19T08:26:21.430282Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8557: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-02-19T08:26:21.430312Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8588: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-02-19T08:26:21.430341Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-02-19T08:26:23.019717Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8677: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-02-19T08:26:23.019792Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-02-19T08:26:23.020030Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-02-19T08:26:23.033445Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-02-19T08:26:23.226300Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:26:23.226475Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:697: [72075186224037894] ScheduleNextAnalyze. operationId: "operationId", started analyzing table: [OwnerId: 72075186224037897, LocalPathId: 34], AnalyzeActorId: [2:4506:4021] 2026-02-19T08:26:23.229455Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4506:4021], ActorId: [2:4516:4025], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:26:23.267699Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4506:4021], ActorId: [2:4516:4025], RunStreamQuery with text: SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table` 2026-02-19T08:26:23.267863Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4506:4021], ActorId: [2:4516:4025], Start read next stream part 2026-02-19T08:26:23.277925Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4529:4031], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:23.278029Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4540:4036], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:23.278122Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:23.279384Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4545:4041], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:23.279604Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:23.284527Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4561:4045], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:23.287432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:23.343461Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4543:4039], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-02-19T08:26:23.438158Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4622:4089], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:23.461068Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:4621:4088] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:23.632015Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4643:4102]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:23.632235Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:26:23.632305Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:4645:4104] 2026-02-19T08:26:23.632343Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:4645:4104] 2026-02-19T08:26:23.632660Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4646:4105] 2026-02-19T08:26:23.632741Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4646:4105], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:23.632794Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:23.632889Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4645:4104], server id = [2:4646:4105], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:23.632951Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:23.632998Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4643:4102], StatRequests.size() = 1 2026-02-19T08:26:23.633066Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:27:22.225031Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4506:4021], ActorId: [2:4516:4025], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:27:22.225295Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4506:4021], ActorId: [2:4516:4025], Start read next stream part 2026-02-19T08:27:22.225494Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg3r5378zd51ygt5z5e669", SessionId: ydb://session/3?node_id=2&id=MzlkMzQ2YWQtZDQzN2FkNTQtYjYwYThlMGEtOTQ4Y2YzN2U=, Slow query, duration: 58.951557s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:27:22.227025Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-02-19T08:27:22.227249Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: 
[2:4765:4217], ActorId: [2:4766:4218], Starting query actor #1 [2:4767:4219] 2026-02-19T08:27:22.227310Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4766:4218], ActorId: [2:4767:4219], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:27:22.230205Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4766:4218], ActorId: [2:4767:4219], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MWFkOTBmMDItMjVhMTdhMDUtNjgxYzM5MDUtZDllMmU1MjU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:27:22.232099Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4506:4021], ActorId: [2:4516:4025], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:27:22.232167Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4506:4021], ActorId: [2:4516:4025], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Njg5ZmE2NzQtZmU3ODc0MDAtZDk4MTBkNDktNzFhOGYyZWM=, TxId: 2026-02-19T08:27:22.294152Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4784:4233]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:27:22.294315Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:27:22.294344Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:4784:4233], StatRequests.size() = 1 2026-02-19T08:27:22.432535Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4766:4218], ActorId: [2:4767:4219], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWFkOTBmMDItMjVhMTdhMDUtNjgxYzM5MDUtZDllMmU1MjU=, TxId: 2026-02-19T08:27:22.432647Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4766:4218], ActorId: [2:4767:4219], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWFkOTBmMDItMjVhMTdhMDUtNjgxYzM5MDUtZDllMmU1MjU=, TxId: 2026-02-19T08:27:22.433052Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4765:4217], ActorId: [2:4766:4218], Got response [2:4767:4219] SUCCESS 2026-02-19T08:27:22.433458Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:27:22.455648Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:27:22.455766Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3359:3458] >> TOlap::Decimal [GOOD] >> TOlap::MoveTableStats >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> TObjectStorageListingTest::TestSkipShards [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2026-02-19T08:27:17.678827Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490056272930381:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:17.680189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:17.886402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:17.923069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:17.923163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:17.961036Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:17.965060Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490056272930322:2081] 1771489637674732 != 1771489637674735 2026-02-19T08:27:17.982118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:18.044113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:18.176205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:18.227728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:18.335120Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490060567898534:2565] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-02-19T08:27:21.280938Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490076983589507:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:21.280988Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-02-19T08:27:21.304665Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.377433Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:21.378559Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490076983589463:2081] 1771489641280085 != 1771489641280088 2026-02-19T08:27:21.414837Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:21.414917Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:21.418989Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:21.568035Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.628984Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:21.636731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:21.644000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:21.704308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:27:21.724184Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608490076983590412:2593] txid# 281474976710660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. 
Only unfreeze alter is allowed 2026-02-19T08:27:21.727885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:27:21.740193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2026-02-19T08:27:17.454813Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490059523745596:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:17.458327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:17.674244Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:17.701304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:17.701399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:17.777651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:17.778399Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:17.780343Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490059523745554:2081] 1771489637450082 != 1771489637450085 2026-02-19T08:27:17.843829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:18.005797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:18.080823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:18.245790Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put 
Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:27:18.249056Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:27:18.269294Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:27:18.273500Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-02-19T08:27:18.437212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:27:18.437471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:515: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2026-02-19T08:27:18.438125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-02-19T08:27:18.438210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-02-19T08:27:18.438241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:27:18.438257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:27:18.438284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-02-19T08:27:18.438295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-02-19T08:27:18.438388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-02-19T08:27:18.438526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:27:18.439291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:27:18.439333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, 
LocalPathId: 40] was 4 2026-02-19T08:27:18.439960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-02-19T08:27:18.440128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-02-19T08:27:18.440309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:27:18.440320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:27:18.440477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-02-19T08:27:18.440564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:27:18.440582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608490059523746088:2247], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 38 2026-02-19T08:27:18.440606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608490059523746088:2247], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 40 2026-02-19T08:27:18.440653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2026-02-19T08:27:18.440700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-02-19T08:27:18.440987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:27:18.441091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:27:18.443841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 
72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-02-19T08:27:18.443926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-02-19T08:27:18.443940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2026-02-19T08:27:18.443960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-02-19T08:27:18.443976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-02-19T08:27:18.444255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-02-19T08:27:18.444310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-02-19T08:27:18.444318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2026-02-19T08:27:18.444350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], version: 1 2026-02-19T08:27:18.444364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 5 ... 
19T08:27:21.706235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490075087276298 RawX2: 4503608217307378 } TabletId: 72075186224037889 State: 4 2026-02-19T08:27:21.706251Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:21.706544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490075087276604 RawX2: 4503608217307438 } TabletId: 72075186224037891 State: 4 2026-02-19T08:27:21.706570Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:21.706665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490075087276604 RawX2: 4503608217307438 } TabletId: 72075186224037891 State: 4 2026-02-19T08:27:21.706678Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:21.706944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:21.706947Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-02-19T08:27:21.706983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:21.707040Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-02-19T08:27:21.707041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:21.707053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:21.707089Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-02-19T08:27:21.707091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:21.707102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:21.707319Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:27:21.707321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:21.707333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 
hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:21.707373Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:27:21.707377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:21.707387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:21.711312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-02-19T08:27:21.711614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-02-19T08:27:21.711803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:27:21.711948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-02-19T08:27:21.712047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:27:21.712155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:27:21.712258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:27:21.712362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-02-19T08:27:21.712448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:27:21.712577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:27:21.712593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-02-19T08:27:21.712646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:27:21.712665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-02-19T08:27:21.712685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:27:21.712947Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:27:21.712969Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-02-19T08:27:21.712978Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-02-19T08:27:21.712987Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-02-19T08:27:21.715721Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-02-19T08:27:21.715742Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-02-19T08:27:21.715757Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-02-19T08:27:21.715772Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-02-19T08:27:21.715983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:1 2026-02-19T08:27:21.715999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-02-19T08:27:21.716030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:27:21.716037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:27:21.716055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:2 2026-02-19T08:27:21.716063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:27:21.716080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:27:21.716097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:27:21.716113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:27:21.716139Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:27:21.718142Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:27:21.718292Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:27:21.719981Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-02-19T08:27:21.720092Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 2026-02-19T08:27:21.721506Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 
72075186224037889 2026-02-19T08:27:21.721548Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 2026-02-19T08:27:21.723170Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-02-19T08:27:21.723214Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037891 2026-02-19T08:27:22.005926Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted Check that tablet 72075186224037890 was deleted 2026-02-19T08:27:22.006338Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2026-02-19T08:27:22.007068Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-02-19T08:27:22.007674Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) 2026-02-19T08:27:22.015151Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2026-02-19T08:27:18.490289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490063366083904:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:18.491452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:18.513196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:18.755104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:18.755204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:18.788957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:18.801911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:18.838988Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:18.839947Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490063366083873:2081] 1771489638486951 != 1771489638486954 2026-02-19T08:27:19.007513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:19.089472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.648908Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490077154370670:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:21.649500Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:21.773928Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:21.774708Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.784287Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:21.784357Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:21.787541Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:21.965371Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.986825Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:22.072671Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:22.535642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715719:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:27:22.658955Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TFlatTest::SplitBoundaryRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2026-02-19T08:27:19.115156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490067466255255:2074];send_to=[0:7307199536658146131:7762515]; 
2026-02-19T08:27:19.116921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:19.345937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:19.346038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:19.384993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:19.413766Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490067466255221:2081] 1771489639112659 != 1771489639112662 2026-02-19T08:27:19.425324Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:19.432973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:19.590858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:19.625051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:19.679346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:19.822469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:27:19.839734Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490067466256176:2592] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-02-19T08:27:19.842176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:27:19.854562Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490067466256216:2626] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-02-19T08:27:22.359262Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490079901961796:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:22.362781Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:22.395244Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:22.471966Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:22.478036Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490079901961766:2081] 1771489642356704 != 1771489642356707 2026-02-19T08:27:22.506861Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:22.506919Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:22.511616Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:22.561506Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:22.679206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:22.684495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:22.688220Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:22.764883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:27:22.787764Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976715660: 2026-02-19T08:27:22.788113Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7608490079901962731:2592] txid# 281474976715660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-02-19T08:27:22.788197Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7608490079901962731:2592] txid# 281474976715660 HANDLE PrepareErrors 
TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-02-19T08:27:22.788215Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7608490079901962731:2592] txid# 281474976715660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2026-02-19T08:27:22.790755Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976715661: 2026-02-19T08:27:22.791102Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7608490079901962739:2597] txid# 281474976715661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-02-19T08:27:22.791163Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7608490079901962739:2597] txid# 281474976715661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-02-19T08:27:22.791179Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7608490079901962739:2597] txid# 281474976715661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2026-02-19T08:27:22.798579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2026-02-19T08:27:18.930189Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490061817302547:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:18.930262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:19.133986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:19.160217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:19.160355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:19.218949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:19.219380Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:19.221166Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490061817302515:2081] 1771489638927041 != 1771489638927044 2026-02-19T08:27:19.344515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:19.344535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:19.344554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-02-19T08:27:19.344636Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:19.368160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:19.564348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:19.628570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:19.937809Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TFlatTest::LsPathId [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> TLocksFatTest::PointSetBreak >> TLocksTest::SetEraseSet [GOOD] >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> TFlatTest::LargeProxyReplyRW [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> THealthCheckTest::BridgeGroupDegradedInBothPiles [GOOD] >> THealthCheckTest::BridgeGroupDegradedInOnePile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2026-02-19T08:27:20.140503Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490071875451425:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:20.142725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:20.327833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:20.360400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:20.360493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:20.400668Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490071875451393:2081] 1771489640138044 != 1771489640138047 2026-02-19T08:27:20.418917Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:20.428353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:20.577016Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:20.635789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:20.641354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:20.747707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:20.932932Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:27:20.936514Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:27:20.963670Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1589 647 6413)b }, ecr=1.000 2026-02-19T08:27:20.969775Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2358 1432 5183)b }, ecr=1.000 2026-02-19T08:27:21.079765Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:21.079787Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:21.080107Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:21.080154Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:21.083706Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=Done, 4 blobs 2r (max 2), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2026-02-19T08:27:21.087998Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.33, eph 3} end=Done, 4 blobs 8r (max 8), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3298 2180 6413)b }, ecr=1.000 2026-02-19T08:27:21.147664Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:21.200789Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:27:21.216412Z node 1 
:HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-02-19T08:27:21.216437Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-02-19T08:27:21.216463Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-02-19T08:27:21.216479Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-02-19T08:27:23.773817Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490082803635606:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:23.773877Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:23.781648Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:23.862934Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:23.864111Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490082803635566:2081] 1771489643773024 != 1771489643773027 2026-02-19T08:27:23.871736Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:23.871814Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:23.889850Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:23.891978Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:24.054345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:24.067102Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:24.078524Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:24.162376Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, 
(1265 647 2154)b }, ecr=1.000 2026-02-19T08:27:24.172589Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:27:24.206558Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.008s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:27:24.211966Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-02-19T08:27:24.225791Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 2026-02-19T08:27:24.336242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } } } TxId: 281474976715678 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:27:24.336446Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715678:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } 2026-02-19T08:27:24.336590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:27:24.336614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-02-19T08:27:24.336768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-02-19T08:27:24.336789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715678:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-02-19T08:27:24.336943Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715678:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000g+\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000g+\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 4 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 Split ... 
19T08:27:24.342618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037891 2026-02-19T08:27:24.346461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715678, at schemeshard: 72057594046644480 2026-02-19T08:27:24.346483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2026-02-19T08:27:24.346512Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715678, at schemeshard: 72057594046644480 2026-02-19T08:27:24.368937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037890 2026-02-19T08:27:24.369003Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037890 2026-02-19T08:27:24.369726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-02-19T08:27:24.371908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037891 2026-02-19T08:27:24.371954Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2026-02-19T08:27:24.371998Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715678:0 ConfigureParts -> TransferData 2026-02-19T08:27:24.372312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-02-19T08:27:24.372399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-02-19T08:27:24.372426Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:27:24.372448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:353: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2026-02-19T08:27:24.372669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2026-02-19T08:27:24.372738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: 
TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2026-02-19T08:27:24.375824Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:24.376087Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:24.376275Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:24.376485Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:24.376686Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=Done, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-02-19T08:27:24.382828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2026-02-19T08:27:24.382914Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2026-02-19T08:27:24.383259Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715678:0 TransferData -> NotifyPartitioningChanged 2026-02-19T08:27:24.383382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-02-19T08:27:24.383761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-02-19T08:27:24.383892Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:27:24.383911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:27:24.384123Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:27:24.384141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:7608490082803636093:2246], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 39 2026-02-19T08:27:24.384189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-02-19T08:27:24.384213Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:27:24.384231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:468: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 
72057594046644480 2026-02-19T08:27:24.387156Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2026-02-19T08:27:24.387241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2026-02-19T08:27:24.387255Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2026-02-19T08:27:24.387270Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], version: 4 2026-02-19T08:27:24.387286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 7 2026-02-19T08:27:24.387353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2026-02-19T08:27:24.387494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2026-02-19T08:27:24.387710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2026-02-19T08:27:24.389792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2026-02-19T08:27:24.389835Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-02-19T08:27:24.389904Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715678:0 progress is 1/1 2026-02-19T08:27:24.389917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2026-02-19T08:27:24.389938Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715678:0 progress is 1/1 2026-02-19T08:27:24.389950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2026-02-19T08:27:24.389970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2026-02-19T08:27:24.390014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7608490087098604018:2351] message: TxId: 281474976715678 2026-02-19T08:27:24.390043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2026-02-19T08:27:24.390069Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715678:0 2026-02-19T08:27:24.390080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976715678:0 2026-02-19T08:27:24.390252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-02-19T08:27:24.390638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-02-19T08:27:24.390664Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715678:0 >> TFlatTest::CrossRW >> TLocksTest::BrokenLockUpdate >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues [GOOD] >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks >> TFlatTest::SelectRangeReverse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2026-02-19T08:26:36.515505Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:36.515613Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:36.527663Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:36.527769Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:36.541885Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:36.964092Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:36.964207Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:36.982035Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:36.982166Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:37.018101Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:37.361294Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:37.361403Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:37.379272Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:37.379392Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:37.404167Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:37.404693Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2161], cookie=5355160555976949878, session=0, seqNo=0) 2026-02-19T08:26:37.404911Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-02-19T08:26:37.416988Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2161], cookie=5355160555976949878, session=1) 
2026-02-19T08:26:37.429004Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:26:37.429087Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:26:37.429398Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:26:37.429928Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:26:37.476234Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:26:37.477154Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:192:2204], cookie=8087977162281217560) 2026-02-19T08:26:37.477240Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:192:2204], cookie=8087977162281217560) 2026-02-19T08:26:37.911600Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:37.924109Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:38.277176Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:38.289252Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:38.629216Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:38.641605Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:38.994542Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:39.006900Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:39.348072Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:39.360420Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:39.702866Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:39.715141Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:40.035110Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:40.047411Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:40.387639Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:40.399785Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:40.740519Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:40.752788Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:41.160776Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:41.173367Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:41.536275Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:41.550525Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: 
[72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:41.919163Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:41.931438Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:42.295177Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:42.307394Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:42.660565Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:42.672726Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:43.069035Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:43.081160Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:43.437516Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:43.450410Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:43.803743Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:43.815936Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:44.168139Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:44.181753Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:44.540468Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:44.554409Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:44.942337Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:44.958590Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:45.319641Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:45.333109Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:45.686040Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:45.702571Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:46.059452Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:46.071497Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:46.418056Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:46.430362Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:46.811098Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:46.825672Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:47.185089Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2026-02-19T08:26:47.197636Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:47.559332Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:47.574525Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:47.929979Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:47.942324Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:48.298664Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:48.312021Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:48.743168Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:48.756092Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:49.131157Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:49.147744Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:49.494532Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:49.506811Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:49.861760Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:49.874355Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:50.217561Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:50.230700Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:50.596384Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:50.610475Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:50.960770Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:26:50.972975Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:26:51.329091Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-1 ... 
k::Execute 2026-02-19T08:27:15.578046Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:15.998025Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:16.010732Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:16.369900Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:16.382386Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:16.749825Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:16.762212Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:17.128325Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:17.142073Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:17.501197Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:17.516027Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:17.877823Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:17.898366Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:18.265824Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:18.278329Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:18.646042Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:18.662635Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:19.027560Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:19.041301Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:19.413778Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:19.431344Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:19.812042Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:19.824259Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:20.179172Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:20.194547Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:20.610250Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:20.627527Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:21.001742Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:21.014084Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:21.392029Z node 4 :KESUS_TABLET 
DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:21.404443Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:21.816049Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:21.828410Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:22.202797Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:22.215036Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:22.584931Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:22.599106Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:22.964557Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:22.976693Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:23.341172Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:23.354489Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:23.751830Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:23.763925Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:24.127620Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:24.147042Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:24.514835Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:24.531145Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:24.883826Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:24.896463Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:25.276059Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:25.289082Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:25.662114Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:25.674495Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:26.031447Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:26.043691Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:26.386529Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:26.402165Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:26.763879Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:26.776013Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2026-02-19T08:27:27.135920Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-02-19T08:27:27.148331Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-02-19T08:27:27.564241Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-02-19T08:27:27.564362Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-02-19T08:27:27.578868Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-02-19T08:27:27.590107Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:535:2482], cookie=3199437761266464202) 2026-02-19T08:27:27.590215Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:535:2482], cookie=3199437761266464202) 2026-02-19T08:27:28.015823Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-02-19T08:27:28.015915Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-02-19T08:27:28.033091Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-02-19T08:27:28.033558Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-02-19T08:27:28.068256Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-02-19T08:27:28.074824Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=7376856149085350707, path="Root", config={ MaxUnitsPerSecond: 100 }) 2026-02-19T08:27:28.075106Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-02-19T08:27:28.087587Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=7376856149085350707) 2026-02-19T08:27:28.091633Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2168]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:27:28.091728Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2168], cookie=0) 2026-02-19T08:27:28.091995Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:148:2170]. Cookie: 0. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-02-19T08:27:28.092032Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:148:2170], cookie=0) 2026-02-19T08:27:28.133901Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2026-02-19T08:27:28.134021Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:148:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2026-02-19T08:27:28.134317Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([5:151:2173]) 2026-02-19T08:27:28.134516Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:37: [72057594037927937] Send TEvResourcesAllocated to [5:148:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2026-02-19T08:27:28.186799Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2026-02-19T08:27:21.232654Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490075330893692:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:21.236627Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:21.444998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.476438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:21.476575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:21.536643Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490075330893650:2081] 1771489641229835 != 1771489641229838 2026-02-19T08:27:21.552638Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:21.557081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:21.671627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.785603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:21.791180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:21.873485Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490075330894497:2526] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-02-19T08:27:21.884784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:27:24.556093Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490087399021214:2062];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:24.556275Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:24.580275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:24.634287Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:24.635793Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490087399021193:2081] 1771489644555204 != 1771489644555207 2026-02-19T08:27:24.644761Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:24.644842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:24.667173Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:24.847171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:24.851998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:24.866963Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:24.871719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> THealthCheckTest::TestStoppedTabletIsNotDead [GOOD] >> THealthCheckTest::TestTabletsInUnresolvaleDatabase >> TOlap::MoveTableStats [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 |94.2%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] >> TFlatTest::SplitEmptyToMany ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2026-02-19T08:27:17.532249Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490059411283186:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:17.533934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:17.773005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:17.789326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:17.789453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:17.844786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:17.870452Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:17.873899Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490059411283142:2081] 1771489637529518 != 1771489637529521 2026-02-19T08:27:17.988213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:18.085186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:18.094363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:18.101831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:18.149938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:18.540466Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:22.533125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490059411283186:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:22.533833Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:27:22.671052Z node 1 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [1:7608490080886123422:4330] txid# 281474976711010 MergeResult Result too large TDataReq marker# P18 2026-02-19T08:27:22.671139Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608490080886123422:4330] txid# 281474976711010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable 2026-02-19T08:27:23.365820Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490085456701021:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:23.366157Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:23.390720Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:23.482339Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:23.484224Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490085456700979:2081] 1771489643356351 != 1771489643356354 2026-02-19T08:27:23.519836Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:23.519926Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:23.532544Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:23.616567Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:23.715097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:23.734400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:24.370443Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:28.044051Z node 2 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [2:7608490102636573972:4339] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2026-02-19T08:27:28.044131Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7608490102636573972:4339] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2026-02-19T08:27:18.149807Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490060556992426:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:18.150555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:18.375504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:18.378082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:18.378201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:18.380525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:18.468032Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:18.470183Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490060556992396:2081] 1771489638148186 != 1771489638148189 2026-02-19T08:27:18.594319Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:18.664428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:18.751785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:18.888508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:18.936984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:18.974840Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2026-02-19T08:27:18.974990Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7608490060556993505:2698] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-02-19T08:27:18.975128Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7608490060556993505:2698] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-02-19T08:27:18.975185Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7608490060556993505:2698] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-02-19T08:27:18.978293Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 2026-02-19T08:27:18.978393Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7608490060556993527:2705] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-02-19T08:27:18.978463Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7608490060556993527:2705] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-02-19T08:27:18.978492Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7608490060556993527:2705] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2026-02-19T08:27:18.981173Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2026-02-19T08:27:18.981271Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7608490060556993534:2709] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-02-19T08:27:18.981344Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7608490060556993534:2709] txid# 281474976710664 HANDLE PrepareErrors 
TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-02-19T08:27:18.981412Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7608490060556993534:2709] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-02-19T08:27:18.984020Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710665: Validate (783): Key validation status: 3 2026-02-19T08:27:18.984091Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7608490060556993540:2712] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-02-19T08:27:18.984149Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7608490060556993540:2712] txid# 281474976710665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-02-19T08:27:18.984175Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7608490060556993540:2712] txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2026-02-19T08:27:21.515514Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.515592Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:21.604114Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:21.607471Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:21.607577Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:21.613900Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490076913328827:2081] 1771489641490214 != 1771489641490217 2026-02-19T08:27:21.635710Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:21.771110Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.825113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:21.829615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:21.845624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:21.904643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:21.947811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:24.848531Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490086864987154:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:24.848584Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:24.865048Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:24.948699Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:24.953381Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490086864987110:2081] 1771489644847048 != 1771489644847051 2026-02-19T08:27:24.985859Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:24.985957Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:25.000684Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:25.149507Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:25.209474Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:25.225859Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:25.280253Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:25.324447Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TLocksTest::NoLocksSet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::MoveTableStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:05.112703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:05.112792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.112879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:05.112915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:05.112947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:05.112971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:05.113032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:05.113059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:05.113116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.113175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:05.114055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:05.114370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:05.197168Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:05.197211Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:05.208394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:05.208997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:05.209187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:05.217452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:05.218156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:05.218845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:05.219069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:05.221440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.221616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:05.222714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:05.222781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.222886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:05.222917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:05.222944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:05.223106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:05.347579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.348539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:27:05.348669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.348742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.348886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.349648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
de 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-02-19T08:27:28.932538Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#203:0 progress is 1/1 2026-02-19T08:27:28.932574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-02-19T08:27:28.932607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 203, ready parts: 1/1, is published: true 2026-02-19T08:27:28.932688Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:866:2822] message: TxId: 203 2026-02-19T08:27:28.932764Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-02-19T08:27:28.932811Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 203:0 2026-02-19T08:27:28.932866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 203:0 2026-02-19T08:27:28.933034Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-02-19T08:27:28.933092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:27:28.933497Z node 3 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186233409546 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 203 2026-02-19T08:27:28.933657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:27:28.933725Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:27:28.933808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:27:28.937193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 203: got EvNotifyTxCompletionResult 2026-02-19T08:27:28.937253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 203: satisfy waiter [3:1042:2987] 2026-02-19T08:27:28.937569Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 203 2026-02-19T08:27:28.938206Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:27:28.938413Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 
describe path "/MyRoot/ColumnTable" took 237us result status StatusPathDoesNotExist 2026-02-19T08:27:28.938608Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:27:28.939367Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 41 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2026-02-19T08:27:28.939625Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 41 took 256us result status StatusSuccess 2026-02-19T08:27:28.940133Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 219 LastUpdateTime: 219 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 
MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } IsRestore: false } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:27:28.986046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 1225216 rowCount 100000 cpuUsage 0 2026-02-19T08:27:28.986229Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:279: PersistSingleStats for pathId [OwnerId: 72057594046678944, LocalPathId: 39], tabletId 72075186233409546, followerId 0: unknown pathId 2026-02-19T08:27:28.997663Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:27:29.278924Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MovedColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:27:29.279215Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MovedColumnTable" took 334us result status StatusSuccess 2026-02-19T08:27:29.279652Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 219 LastUpdateTime: 219 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } IsRestore: false } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTest::SelectRangeItemsLimit >> TLocksTest::GoodSameKeyLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2026-02-19T08:27:19.502885Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490067033212020:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:19.502962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:19.719805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:19.719868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:19.754574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:19.770919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:19.803331Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:19.804272Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490067033211891:2081] 1771489639496886 != 1771489639496889 2026-02-19T08:27:19.950033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:19.992199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:19.996803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:20.070701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:20.265386Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:27:20.284446Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:27:20.324262Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:27:20.328904Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-02-19T08:27:20.464084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:27:20.464410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:515: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2026-02-19T08:27:20.464897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-02-19T08:27:20.464948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-02-19T08:27:20.464960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:27:20.464979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:27:20.465010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-02-19T08:27:20.465024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 
72057594046644480, LocalPathId: 40] was 2 2026-02-19T08:27:20.465149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-02-19T08:27:20.465287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:27:20.466087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:27:20.466157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-02-19T08:27:20.466887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-02-19T08:27:20.467116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-02-19T08:27:20.467339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:27:20.467363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:27:20.467490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-02-19T08:27:20.467579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:27:20.467599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608490067033212425:2245], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2026-02-19T08:27:20.467650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608490067033212425:2245], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2026-02-19T08:27:20.467714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2026-02-19T08:27:20.467749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-02-19T08:27:20.468126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } 
BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:27:20.468281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:27:20.470902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-02-19T08:27:20.471002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-02-19T08:27:20.471013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2026-02-19T08:27:20.471029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-02-19T08:27:20.471041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-02-19T08:27:20.471200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-02-19T08:27:20.471226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-02-19T08:27:20.471230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2026-02-19T08:27:20.471236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], version: 1 2026-02-19T ... 
tashard_loans.cpp:128: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2026-02-19T08:27:27.321940Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-02-19T08:27:27.321986Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:27:27.322363Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:27:27.322418Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:27:27.323597Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-02-19T08:27:27.323632Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037892, clientId# [3:7608490101616773424:2946], serverId# [3:7608490101616773426:2948], sessionId# [0:0:0] 2026-02-19T08:27:27.323679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490101616773097 RawX2: 4503612512274733 } TabletId: 72075186224037890 State: 4 2026-02-19T08:27:27.323688Z node 3 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:27:27.323711Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:27.323959Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2026-02-19T08:27:27.324061Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-02-19T08:27:27.324103Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037892 2026-02-19T08:27:27.324123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:27.324153Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:27.325251Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037889, clientId# [3:7608490101616773533:2385], serverId# [3:7608490101616773540:3045], sessionId# [0:0:0] 2026-02-19T08:27:27.325275Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3671: Client pipe to tablet 72075186224037892 from 72075186224037890 is reset 2026-02-19T08:27:27.325293Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-02-19T08:27:27.325455Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490097321805499 RawX2: 4503612512274675 } TabletId: 72075186224037889 State: 4 2026-02-19T08:27:27.325491Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, 
datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:27.325629Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490101616773100 RawX2: 4503612512274734 } TabletId: 72075186224037891 State: 4 2026-02-19T08:27:27.325648Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:27.325841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:27:27.326032Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-02-19T08:27:27.326356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:27.326387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:27.326461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:27.326471Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:27.326614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:27:27.326626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:27:27.326987Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-02-19T08:27:27.327026Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037890, clientId# [3:7608490101616773177:2769], serverId# [3:7608490101616773178:2770], sessionId# [0:0:0] 2026-02-19T08:27:27.327042Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-02-19T08:27:27.327054Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:27:27.327288Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2026-02-19T08:27:27.327647Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-02-19T08:27:27.327699Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 2026-02-19T08:27:27.328264Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:27:27.328416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason 
shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:27:27.328554Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:27:27.328717Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-02-19T08:27:27.328773Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-02-19T08:27:27.328820Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-02-19T08:27:27.328831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:27:27.328843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-02-19T08:27:27.328879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:27:27.328893Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-02-19T08:27:27.328914Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:27:27.329374Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-02-19T08:27:27.329415Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 2026-02-19T08:27:27.329900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:2 2026-02-19T08:27:27.329913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:27:27.329938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:27:27.329946Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2026-02-19T08:27:27.329949Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:27:27.329967Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2026-02-19T08:27:27.330067Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:27:27.330724Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-02-19T08:27:27.330771Z node 3 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037891 2026-02-19T08:27:27.580081Z node 3 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:27.613180Z node 3 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2026-02-19T08:27:27.614046Z node 3 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2026-02-19T08:27:27.614540Z node 3 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-02-19T08:27:27.614894Z node 3 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-02-19T08:27:27.615171Z node 3 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-02-19T08:27:27.615530Z node 3 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::SplitInvalidPath >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> KqpScan::ScanPg [GOOD] >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys >> TFlatTest::SelectBigRangePerf >> TFlatTest::Init >> TLocksTest::BrokenLockErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:27:00.981206Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:00.982326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:00.985651Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:01.108705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:01.110167Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:01.116022Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:01.116359Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:01.116556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:01.118114Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:01.118227Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:01.497270Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:01.618477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:01.618620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:01.619461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:01.619571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:01.671967Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:27:01.672477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:01.672834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:01.757837Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:01.770903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:02.460490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:02.526264Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:02.526452Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:02.629898Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2026-02-19T08:27:02.630282Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-02-19T08:27:02.630502Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2026-02-19T08:27:02.630833Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-02-19T08:27:02.877146Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:227:2184] Handle TEvProposeTransaction 2026-02-19T08:27:02.877213Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:227:2184] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:27:02.877304Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:227:2184] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1579:3032] 2026-02-19T08:27:03.008425Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:1579:3032] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:27:03.008543Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:1579:3032] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:27:03.009428Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:1579:3032] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:27:03.009530Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:1579:3032] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:27:03.010070Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:1579:3032] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:27:03.010218Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:1579:3032] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:27:03.010364Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1579:3032] txid# 281474976710657 SEND to# 
72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:27:03.010689Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:1579:3032] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:27:03.012378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:03.017365Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:1579:3032] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:27:03.017492Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:1579:3032] txid# 281474976710657 SEND to# [1:1503:2986] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:27:03.077451Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1628:2399] 2026-02-19T08:27:03.077773Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:27:03.136721Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:27:03.136943Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:27:03.138688Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:27:03.138775Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:27:03.138841Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:27:03.139231Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:27:03.139543Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:27:03.139636Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [2:1652:2399] in generation 1 2026-02-19T08:27:03.153234Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:27:03.209947Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:27:03.210179Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:27:03.210296Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [2:1655:2416] 2026-02-19T08:27:03.210344Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:27:03.210382Z node 2 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:27:03.210420Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:27:03.210955Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:27:03.211118Z node 2 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:27:03.211247Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:27:03.211291Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:27:03.211339Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:27:03.211381Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:27:03.211565Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:1612:3060], serverId# [2:1625:2397], sessionId# [0:0:0] 2026-02-19T08:27:03.212400Z node 2 :TX_DATASHAR ... 0}. Compute actor has finished execution: [3:1923:3244] 2026-02-19T08:27:18.680464Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [3:1916:3215] TxId: 281474976715662. Ctx: { TraceId: 01khtg5cvk0s2jpn4mq7xh2qk8, Database: , SessionId: ydb://session/3?node_id=3&id=ZGVmYWFhNDQtYmU3MmFjYjctNzc4NTk3ZTEtNjk2YWRmMDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:1924:2445], trace_id# 2026-02-19T08:27:18.680764Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [3:1916:3215] TxId: 281474976715662. Ctx: { TraceId: 01khtg5cvk0s2jpn4mq7xh2qk8, Database: , SessionId: ydb://session/3?node_id=3&id=ZGVmYWFhNDQtYmU3MmFjYjctNzc4NTk3ZTEtNjk2YWRmMDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:1924:2445] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 271178 DurationUs: 10000 Tasks { TaskId: 1 CpuTimeUs: 268489 FinishTimeMs: 1771489638676 OutputRows: 1 OutputBytes: 7 Tables { TablePath: "/Root/table-1" ReadRows: 100 ReadBytes: 800 } ComputeCpuTimeUs: 185 BuildCpuTimeUs: 268304 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-fkrokuvqke" NodeId: 4 StartTimeMs: 1771489638666 CreateTimeMs: 1771489638368 UpdateTimeMs: 1771489638677 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:18.680860Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01khtg5cvk0s2jpn4mq7xh2qk8, Database: , SessionId: ydb://session/3?node_id=3&id=ZGVmYWFhNDQtYmU3MmFjYjctNzc4NTk3ZTEtNjk2YWRmMDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:1924:2445] 2026-02-19T08:27:18.680965Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [3:1916:3215] TxId: 281474976715662. Ctx: { TraceId: 01khtg5cvk0s2jpn4mq7xh2qk8, Database: , SessionId: ydb://session/3?node_id=3&id=ZGVmYWFhNDQtYmU3MmFjYjctNzc4NTk3ZTEtNjk2YWRmMDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:27:18.681048Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [3:1916:3215] TxId: 281474976715662. Ctx: { TraceId: 01khtg5cvk0s2jpn4mq7xh2qk8, Database: , SessionId: ydb://session/3?node_id=3&id=ZGVmYWFhNDQtYmU3MmFjYjctNzc4NTk3ZTEtNjk2YWRmMDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:27:18.681111Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [3:1916:3215] TxId: 281474976715662. 
Ctx: { TraceId: 01khtg5cvk0s2jpn4mq7xh2qk8, Database: , SessionId: ydb://session/3?node_id=3&id=ZGVmYWFhNDQtYmU3MmFjYjctNzc4NTk3ZTEtNjk2YWRmMDU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.721430s ReadRows: 100 ReadBytes: 800 RequestUnits# 480 ForceFlag# true trace_id# 2026-02-19T08:27:18.682197Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-02-19T08:27:18.682342Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:226:2184] Handle TEvProposeTransaction 2026-02-19T08:27:18.682383Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:226:2184] TxId# 0 ProcessProposeTransaction 2026-02-19T08:27:18.682516Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [3:226:2184] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1946:3255] SnapshotReq marker# P0 2026-02-19T08:27:18.683661Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [3:1949:3255] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-02-19T08:27:18.684042Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [3:1949:3255] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-02-19T08:27:18.684171Z node 3 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [3:1946:3255] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 ... waiting for SysViewsRoster update finished 2026-02-19T08:27:26.352987Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:26.353338Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:26.365573Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:26.367206Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:26.369981Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:506:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:26.370453Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:26.370628Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:26.371435Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:499:2170], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:26.371737Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:26.371969Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:26.695759Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:26.803708Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:26.803855Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:26.804359Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:26.804424Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:26.890250Z node 5 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2026-02-19T08:27:26.890738Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:26.891256Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:26.979213Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:26.994382Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:27.687120Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:27.754883Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:27.755040Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:28.116717Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:28.668854Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1740:3117], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:27:28.668943Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1749:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:27:28.669004Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:27:28.669804Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1755:3126], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:27:28.669954Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:27:28.674520Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:27:29.158294Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1754:3125], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:27:29.275054Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:1851:3193] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:27:31.449980Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> TLocksTest::Range_CorrectNullDot |94.2%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> TFlatTest::GetTabletCounters [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> TLocksFatTest::PointSetRemove [GOOD] >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> TFlatTest::WriteMergeAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2026-02-19T08:27:28.927876Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490105110294793:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:28.929059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:29.125397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:29.147625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:29.147705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:29.207058Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:29.208885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:29.209457Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490105110294761:2081] 1771489648926176 != 1771489648926179 2026-02-19T08:27:29.343771Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:29.424755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:29.507810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:32.223715Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490121631502075:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:32.225310Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:32.259132Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.327524Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:32.333447Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490121631502012:2081] 1771489652218236 != 1771489652218239 2026-02-19T08:27:32.339626Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:32.339714Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:32.370842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:32.495684Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.566843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:32.577943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:32.587937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:32.597753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> THealthCheckTest::TestOnlyRequestNeededTablets >> THealthCheckTest::BridgeGroupDegradedInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInOnePile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2026-02-19T08:27:29.401922Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490111537280787:2174];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:29.406760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:29.631982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:29.666985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:29.667075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:29.722645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:29.725868Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:29.727782Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490111537280642:2081] 1771489649395069 != 1771489649395072 2026-02-19T08:27:29.843210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:29.957076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:30.034595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:30.412808Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:32.959260Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490121055229761:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:32.959489Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:32.984285Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:33.074229Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490121055229731:2081] 1771489652955348 != 1771489652955351 2026-02-19T08:27:33.088059Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:33.101271Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:33.101354Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:33.107989Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:33.211939Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:33.285938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:33.303603Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks [GOOD] >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks >> THealthCheckTest::ShardsLimit995 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2026-02-19T08:27:21.668275Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490076477710159:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:21.668392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:21.868644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:21.892222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:21.892299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-02-19T08:27:21.962548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:21.964542Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:21.967150Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490076477710131:2081] 1771489641666492 != 1771489641666495 2026-02-19T08:27:22.127011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:22.171023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:22.247384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:22.382019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:22.441798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:22.674987Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:26.668416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490076477710159:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:26.668459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:27:28.633868Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490105532601687:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:28.633925Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:28.649579Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-02-19T08:27:28.715003Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:28.716526Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490105532601658:2081] 1771489648632942 != 1771489648632945 2026-02-19T08:27:28.761330Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:28.761414Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:28.766342Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:28.843733Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:28.925560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:28.942402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:29.001680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:29.048218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:29.640130Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:32.402156Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490123965265481:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:32.402215Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:32.431835Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.489317Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:32.491008Z node 3 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490123965265442:2081] 1771489652401233 != 1771489652401236 2026-02-19T08:27:32.531386Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:32.531473Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:32.533280Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:32.719292Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.756724Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:32.782452Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:32.840369Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:32.915566Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:33.408948Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> THealthCheckTest::ShardsLimit905 >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> TFlatTest::SplitThenMerge [GOOD] >> TFlatTest::SelectRangeBothLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2026-02-19T08:27:31.572213Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490117744129735:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:31.572276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:31.781382Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:31.789370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:31.789547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:31.861971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:31.867741Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:31.869844Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490117744129704:2081] 1771489651570393 != 1771489651570396 2026-02-19T08:27:32.045982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.095389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:32.104721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:32.176065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:32.582731Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:34.980053Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490132005105273:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:34.980114Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:35.000297Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:35.115195Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:35.127134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:35.127209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:35.130503Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-02-19T08:27:35.207880Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:35.302903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:35.310146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:35.319772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TFlatTest::SelectRangeBytesLimit >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2026-02-19T08:27:32.281924Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490124632099128:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:32.282020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:32.509937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.524350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:32.524487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:32.613565Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:32.619707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:32.698168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.855015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:32.859616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:27:32.931303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), tableStr: /dc-1/Dir1, tableId: , opId: 281474976710659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } 2026-02-19T08:27:32.946329Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490124632099883:2496] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-02-19T08:27:35.710785Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490135488782503:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:35.711268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:35.728720Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:35.825812Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:35.835902Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:35.840431Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490135488782474:2081] 1771489655709590 != 1771489655709593 2026-02-19T08:27:35.863519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:35.863597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:35.865912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.026079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:36.051656Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:36.057419Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:36.203601Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:27:36.213552Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:27:36.242094Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:27:36.248017Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-02-19T08:27:36.272802Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:36.274658Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.274796Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:36.276008Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.276644Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:36.277213Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037888 restored its data 2026-02-19T08:27:36.278058Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.278161Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:36.278533Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037889 restored its data 2026-02-19T08:27:36.279192Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.279799Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:36.280176Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037888 restored its data 2026-02-19T08:27:36.280839Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.280915Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:36.281336Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037889 restored its data 2026-02-19T08:27:36.281952Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.285160Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:36.285582Z 
node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037889 restored its data 2026-02-19T08:27:36.286275Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.286379Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:36.286765Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037888 restored its data 2026-02-19T08:27:36.287425Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.288029Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:36.288408Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037889 restored its data 2026-02-19T08:27:36.289056Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.289161Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:36.289591Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037888 restored its data 2026-02-19T08:27:36.290282Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.290844Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:36.291231Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037889 restored its data 2026-02-19T08:27:36.291905Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.292012Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:36.297981Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037888 restored its data 2026-02-19T08:27:36.298752Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976715676 released its data 2026-02-19T08:27:36.298904Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:36.299328Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976715676 at 72075186224037889 restored its data 2026-02-19T08:27:36.299988Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase: ... 
tablet: 72057594046644480 2026-02-19T08:27:36.745934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7608490139783751248 RawX2: 4503608217307470 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2026-02-19T08:27:36.745972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, shard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2026-02-19T08:27:36.745993Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-02-19T08:27:36.746019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-02-19T08:27:36.746035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2026-02-19T08:27:36.746053Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715693:0 ProposedWaitParts -> Done 2026-02-19T08:27:36.746252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-02-19T08:27:36.746510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-02-19T08:27:36.746581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-02-19T08:27:36.746657Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2026-02-19T08:27:36.746683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-02-19T08:27:36.746719Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-02-19T08:27:36.746725Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:27:36.746823Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2026-02-19T08:27:36.746849Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-02-19T08:27:36.747117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-02-19T08:27:36.747288Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part 
operation is done id#281474976715693:0 progress is 1/1 2026-02-19T08:27:36.747309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-02-19T08:27:36.747335Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-02-19T08:27:36.747352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-02-19T08:27:36.747383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2026-02-19T08:27:36.747434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7608490139783751481:2420] message: TxId: 281474976715693 2026-02-19T08:27:36.747467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-02-19T08:27:36.747485Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-02-19T08:27:36.747493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976715693:0 2026-02-19T08:27:36.747580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:27:36.748142Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:27:36.748193Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-02-19T08:27:36.749066Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:27:36.749130Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037894 Initiating switch from PreOffline to Offline state 2026-02-19T08:27:36.750530Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:27:36.750616Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:27:36.750918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490139783750630 RawX2: 4503608217307379 } TabletId: 72075186224037889 State: 4 2026-02-19T08:27:36.751022Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:36.751189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490139783751248 RawX2: 4503608217307470 } TabletId: 72075186224037894 State: 4 
2026-02-19T08:27:36.751217Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:36.751506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:36.751551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:36.751632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:36.751658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:36.751987Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-02-19T08:27:36.752041Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2026-02-19T08:27:36.753178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:27:36.753412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-02-19T08:27:36.753683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-02-19T08:27:36.753725Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-02-19T08:27:36.753757Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037894 reason = ReasonStop 2026-02-19T08:27:36.753857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:27:36.757961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:27:36.759261Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-02-19T08:27:36.759293Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2026-02-19T08:27:36.759522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-02-19T08:27:36.759598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:27:36.762673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 
72057594046644480:2 2026-02-19T08:27:36.762709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:27:36.762759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:7 2026-02-19T08:27:36.762800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-02-19T08:27:36.763046Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-02-19T08:27:36.763135Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 2026-02-19T08:27:36.763383Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:27:36.765953Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037894 2026-02-19T08:27:36.766042Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037894 >> TObjectStorageListingTest::Split >> TLocksTest::Range_BrokenLockMax ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2026-02-19T08:27:33.058274Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490127936526119:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:33.058329Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:33.337045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:33.337165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:33.381443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:33.403480Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:33.407020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:33.407392Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490127936526088:2081] 1771489653054325 != 1771489653054328 2026-02-19T08:27:33.537799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:33.606309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:27:33.610760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:33.644992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-02-19T08:27:33.695143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:34.076078Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; insert finished 9625 usec 8861 usec 10686 usec 11445 usec 9426 usec 11140 usec 11120 usec 14491 usec 9083 usec 9345 usec 2026-02-19T08:27:36.647948Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490139271856295:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:36.647991Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:36.682850Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:36.739053Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:36.776176Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:36.776257Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:36.779707Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.944927Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:36.947952Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:36.967726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:37.652960Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table [GOOD] >> TLocksFatTest::LocksLimit 
[GOOD] >> TFlatTest::LargeDatashardReplyDistributed >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query >> TLocksTest::Range_Pinhole >> THealthCheckTest::TestTabletsInUnresolvaleDatabase [GOOD] >> THealthCheckTest::UnknowPDiskState >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> TFlatTest::WriteSplitAndRead [GOOD] >> TLocksTest::GoodLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2026-02-19T08:27:28.292203Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490104026399328:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:28.292779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:28.474395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:28.499363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:28.499469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:28.538882Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:28.540272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:28.540642Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490104026399297:2081] 1771489648290756 != 1771489648290759 2026-02-19T08:27:28.713290Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:28.737128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:28.792281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:28.914414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:28.957833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:29.298808Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:33.292302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490104026399328:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:33.292391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:27:35.505168Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490133826983723:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:35.505678Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:35.534689Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:35.588990Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:35.590712Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490133826983694:2081] 1771489655504144 != 1771489655504147 2026-02-19T08:27:35.657032Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:35.657134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:35.662726Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:35.700456Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:35.805702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:35.831978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:35.887663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:35.931124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:39.224762Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:39.224845Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:39.296728Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:39.302112Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490151549436847:2081] 1771489659199263 != 1771489659199266 2026-02-19T08:27:39.331553Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:39.331649Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:39.337959Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:39.498186Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:39.507786Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:39.527001Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:39.584255Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:39.635305Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:40.207363Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event 
for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> KqpBatchDelete::ManyPartitions_1 [GOOD] >> KqpBatchDelete::ManyPartitions_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2026-02-19T08:27:37.597324Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490142735056468:2267];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:37.597386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:37.814975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:37.840846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:37.840962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:37.902042Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:37.904320Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490142735056221:2081] 1771489657573572 != 1771489657573575 2026-02-19T08:27:37.912896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:38.055360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:38.130578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:38.207095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:38.387510Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:27:38.390022Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:27:38.423630Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:27:38.429223Z node 1 :OPS_COMPACT INFO: 
Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.008s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-02-19T08:27:38.453728Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.457979Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-02-19T08:27:38.460479Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:27:38.460567Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:27:38.461120Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:27:38.461151Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2026-02-19T08:27:38.463697Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.465988Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:27:38.466034Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:27:38.468913Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:38.470426Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:27:38.470483Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:27:38.473322Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:38.473452Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2026-02-19T08:27:38.474704Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:27:38.474719Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2026-02-19T08:27:38.475654Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:27:38.475693Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:27:38.487697Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.489644Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.489817Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: 
TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:38.491209Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.491800Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.492351Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037888 restored its data 2026-02-19T08:27:38.493383Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.493518Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:38.494099Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037889 restored its data 2026-02-19T08:27:38.494906Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.495002Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.495520Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037888 restored its data 2026-02-19T08:27:38.496296Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.496391Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:38.496892Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037889 restored its data 2026-02-19T08:27:38.497595Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.498029Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.498473Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037888 restored its data 2026-02-19T08:27:38.499227Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.499318Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:38.499814Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037889 restored its data 2026-02-19T08:27:38.500605Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.501050Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.501584Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037888 restored its data 2026-02-19T08:27:38.502340Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.502442Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:38.502871Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037889 restored its data 2026-02-19T08:27:38.503533Z node 1 :TX_DATASHARD DEBUG: 
datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.503636Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.504124Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037888 restored its data 2026-02-19T08:27:38.504909Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.504996Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:38.505469Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037889 restored its data 2026-02-19T08:27:38.506274Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.506375Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.506819Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037888 restored its data 2026-02-19T08:27:38.507520Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710680 released its data 2026-02-19T08:27:38.507964Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:38.508380Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710680 at 72075186224037888 restored its data 2026-02-19T08:27:38.509157Z node 1 :TX_DATASHA ... 644480:5 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:41.956175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:41.956188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:41.956235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:41.956246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:41.956620Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-02-19T08:27:41.956641Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-02-19T08:27:41.956654Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:27:41.956665Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:27:41.956675Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-02-19T08:27:41.956713Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-02-19T08:27:41.956758Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 
2026-02-19T08:27:41.958420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-02-19T08:27:41.958749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-02-19T08:27:41.959024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-02-19T08:27:41.959235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:27:41.959464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-02-19T08:27:41.959681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:27:41.959873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:27:41.960042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:27:41.960189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:27:41.960335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-02-19T08:27:41.960486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-02-19T08:27:41.960635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-02-19T08:27:41.960774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:27:41.960920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:27:41.961066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:27:41.961087Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-02-19T08:27:41.961130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:27:41.961843Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-02-19T08:27:41.961886Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7608490162611790863:2585], serverId# [2:7608490162611790864:2586], sessionId# [0:0:0] 2026-02-19T08:27:41.961903Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-02-19T08:27:41.961918Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7608490162611791227:2814], serverId# [2:7608490162611791228:2815], sessionId# [0:0:0] 2026-02-19T08:27:41.961948Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-02-19T08:27:41.961976Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7608490162611791229:2816], serverId# [2:7608490162611791230:2817], sessionId# [0:0:0] 2026-02-19T08:27:41.961993Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-02-19T08:27:41.962010Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7608490162611791231:2818], serverId# [2:7608490162611791232:2819], sessionId# [0:0:0] 2026-02-19T08:27:41.962023Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-02-19T08:27:41.962038Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7608490162611790873:2592], serverId# [2:7608490162611790874:2593], sessionId# [0:0:0] 2026-02-19T08:27:41.962085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:1 2026-02-19T08:27:41.962107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-02-19T08:27:41.962173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:1 2026-02-19T08:27:41.962712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:27:41.962746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:27:41.962791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:27:41.962824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:27:41.962832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:27:41.962848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 
72057594046644480:4 2026-02-19T08:27:41.962863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:5 2026-02-19T08:27:41.962870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-02-19T08:27:41.962898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:5 2026-02-19T08:27:41.963370Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-02-19T08:27:41.963397Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-02-19T08:27:41.963410Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-02-19T08:27:41.963422Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-02-19T08:27:41.963439Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-02-19T08:27:41.963493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:2 2026-02-19T08:27:41.963510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:27:41.963546Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:27:41.964245Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-02-19T08:27:41.964333Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037888 2026-02-19T08:27:41.966495Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-02-19T08:27:41.966559Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 2026-02-19T08:27:41.968128Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-02-19T08:27:41.968191Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037891 2026-02-19T08:27:41.969659Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-02-19T08:27:41.970622Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037892 2026-02-19T08:27:41.972330Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-02-19T08:27:41.972389Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> KqpComputeScheduler::StressTest [GOOD] >> TFlatTest::LargeDatashardReply [GOOD] >> TLocksTest::UpdateLockedKey >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> THealthCheckTest::StorageLimit50 [GOOD] >> THealthCheckTest::StorageNoQuota >> THealthCheckTest::BridgeGroupDeadInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInBothPiles >> TObjectStorageListingTest::SuffixColumns [GOOD] >> 
THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsNoLimit |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> KqpComputeScheduler::StressTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::WideCombine-EnabledLogs 2026-02-19 08:27:44,054 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-02-19 08:27:44,276 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 283971 59.4M 58.4M 33.5M test_tool run_ut @/home/runner/.ya/build/build_root/ynks/003703/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args 284822 2.3G 2.3G 1.8G └─ ydb-core-kqp-ut-runtime --trace-path-append /home/runner/.ya/build/build_root/ynks/003703/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4/ytest.re Test command err: cwd: /home/runner/.ya/build/build_root/ynks/003703/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4 ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:42" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (DataType 'Uint64)) (let $4 '('('"_logical_id" '506) '('"_id" '"966017ff-aa17d63e-6c464c7a-22989dca") '('"_wide_channels" (StructType '('"Value" (OptionalType (DataType 'String))) '('_yql_agg_0 $3))))) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($12) (block '( (let $13 (lambda '($15) (Member $15 '"Key") (Member $15 '"Value"))) (let $14 (lambda '($25 $26) $25 $26)) (return (FromFlow (DqPhyHashCombine (ExpandMap (ToFlow $12) $13) '-1073741824 (lambda '($16 $17) $17) (lambda '($18 $19 $20) (AggrCountInit $19)) (lambda '($21 $22 $23 $24) (AggrCountUpdate $22 $24)) $14))) ))) $4)) (let $6 (DqCnHashShuffle (TDqOutput $5 '0) '('0) '0 '"HashV2")) (let $7 (DqPhyStage '($6) (lambda '($27) (block '( (let $28 (DqPhyHashCombine (ToFlow $27) '"" (lambda '($29 $30) $29) (lambda '($31 $32 $33) $33) (lambda '($34 $35 $36 $37) (AggrAdd $36 $37)) (lambda '($38 $39) $39))) (return (FromFlow (NarrowMap $28 (lambda '($40) (AsStruct '('"column0" $40)))))) ))) '('('"_logical_id" '1266) '('"_id" '"73ac16fd-9380112c-c3fdb06d-f2345649")))) (let $8 (DqCnUnionAll (TDqOutput $7 '0))) (let $9 (DqPhyStage '($8) (lambda '($41) $41) '('('"_logical_id" '1538) '('"_id" '"c02b2a41-b52fa4b8-e1b0cf1c-530f7ea9")))) (let $10 '($5 $7 $9)) (let $11 (DqCnResult (TDqOutput $9 '0) '('"column0"))) (return (KqpPhysicalQuery '((KqpPhysicalTx $10 '($11) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType (StructType '('"column0" $3))) '0 '0)) '('('"type" '"query")))) ) cwd: /home/runner/.ya/build/build_root/ynks/003703/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( ~~~~~~~~^ process_is_finished, ^^^^^^^^^^^^^^^^^^^^ ...<3 lines>... 
on_check_condition=self._process_progress_listener, ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ) ^ File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/11121628785/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ynks/003703/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) ~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/11121628785/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ynks/003703/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> TFlatTest::MiniKQLRanges >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/runtime/unittest >> KqpScanLogs::GraceJoin-EnabledLogs 2026-02-19 08:27:44,387 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-02-19 08:27:44,646 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 284151 60.8M 59.3M 34.8M test_tool run_ut @/home/runner/.ya/build/build_root/ynks/003702/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args 284823 2.2G 2.2G 1.8G └─ ydb-core-kqp-ut-runtime --trace-path-append /home/runner/.ya/build/build_root/ynks/003702/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3/ytest.re Test command err: cwd: /home/runner/.ya/build/build_root/ynks/003702/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3 ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:42" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '721) '('"_id" '"783dff94-64932ca7-5a69b3c8-8fc2fba7") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7 '1 '"HashV2")) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '677) '('"_id" '"c19a70a5-c2bb3569-bcdebb0e-d2606258") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '689) '('"_id" '"18e89ca2-f48dbaea-a582dc85-fcf86918")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) cwd: /home/runner/.ya/build/build_root/ynks/003702/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( ~~~~~~~~^ process_is_finished, ^^^^^^^^^^^^^^^^^^^^ ...<3 lines>... 
on_check_condition=self._process_progress_listener, ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ) ^ File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/11121628785/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ynks/003702/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) ~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/11121628785/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/ynks/003702/ydb/core/kqp/ut/runtime/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2026-02-19T08:27:33.649801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:33.675097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:33.675989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:33.713856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:33.732294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:33.757472Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:33.760026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490128248635950:2081] 1771489653343600 != 1771489653343603 2026-02-19T08:27:33.776986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1771489653815, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:27:33.789974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771489653815 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:27:33.790063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:35: MkDir::TPropose operationId# 281474976715657:0 HandleReply TEvPrivate::TEvOperationPlan, step: 
1771489653815, at schemeshard: 72057594046644480 2026-02-19T08:27:33.790200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976715657:0 Propose -> Done 2026-02-19T08:27:33.790370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:27:33.790439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 36 2026-02-19T08:27:33.790960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:27:33.790973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:27:33.791092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2026-02-19T08:27:33.791628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:27:33.791643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608490128248636485:2249], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2026-02-19T08:27:33.791659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608490128248636485:2249], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2026-02-19T08:27:33.791696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-02-19T08:27:33.791722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2026-02-19T08:27:33.791809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 1/1 2026-02-19T08:27:33.791823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2026-02-19T08:27:33.791839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 1/1 2026-02-19T08:27:33.791847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2026-02-19T08:27:33.791858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2026-02-19T08:27:33.791873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2026-02-19T08:27:33.791882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715692 2026-02-19T08:27:33.791913Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715691 2026-02-19T08:27:33.791932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715686 2026-02-19T08:27:33.791955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715690 2026-02-19T08:27:33.791990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715685 2026-02-19T08:27:33.792008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715689 2026-02-19T08:27:33.792021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715684 2026-02-19T08:27:33.792042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715688 2026-02-19T08:27:33.792059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715683 2026-02-19T08:27:33.792070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715687 2026-02-19T08:27:33.792080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715682 2026-02-19T08:27:33.792089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715681 2026-02-19T08:27:33.792100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715680 2026-02-19T08:27:33.792117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715675 2026-02-19T08:27:33.792129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715679 2026-02-19T08:27:33.792138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715674 2026-02-19T08:27:33.792148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715678 2026-02-19T08:27:33.792158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715673 2026-02-19T08:27:33.792171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715677 2026-02-19T08:27:33.792181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715672 2026-02-19T08:27:33.792209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715676 2026-02-19T08:27:33.792224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715671 2026-02-19T08:27:33.792233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715670 2026-02-19T08:27:33.792267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715669 2026-02-19T08:27:33.792281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715664 2026-02-19T08:27:33.792312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715668 2026-02-19T08:27:33.792324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715663 2026-02-19T08:27:33.792334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715667 2026-02-19T08:27:33.792343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715662 2026-02-19T08:27:33.792353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715666 2026-02-19T08:27:33.792364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715661 2026-02-19T08:27:33.792374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715665 2026-02-19T08:27:33.792383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715660 2026-02-19T08:27:33.792392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715659 2026-02-19T08:27:33.792401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:965: Remove dependency, parent tx: 281474976715657, dependent tx: 281474976715658 2026-02-19T08:27:33.792432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-02-19T08:27:33.792475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: scheme ... 
ne TxId: 281474976710674 ready parts: 1/1 2026-02-19T08:27:34.897616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710674, ready parts: 1/1, is published: true 2026-02-19T08:27:34.897660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7608490132543605024:2362] message: TxId: 281474976710674 2026-02-19T08:27:34.897682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2026-02-19T08:27:34.897704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710674:0 2026-02-19T08:27:34.897920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710674:0 2026-02-19T08:27:34.898016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 2 2026-02-19T08:27:34.898546Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:27:34.898603Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037899 Initiating switch from PreOffline to Offline state 2026-02-19T08:27:34.900080Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:27:34.900383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490132543604609 RawX2: 4503603922340117 } TabletId: 72075186224037899 State: 4 2026-02-19T08:27:34.900431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:34.900757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:34.900761Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2026-02-19T08:27:34.900809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:34.901071Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 0 TabletID: 72075186224037899 2026-02-19T08:27:34.901127Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2026-02-19T08:27:34.901184Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2026-02-19T08:27:34.901241Z node 1 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2026-02-19T08:27:34.901302Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:67: 
HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2026-02-19T08:27:34.902735Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7608490128248636249:2122] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7608490128248636305:2146] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2026-02-19T08:27:34.902865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2026-02-19T08:27:34.903054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 1 2026-02-19T08:27:34.903225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:27:34.903249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 42], at schemeshard: 72057594046644480 2026-02-19T08:27:34.903293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:27:34.903344Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037899 reason = ReasonStop 2026-02-19T08:27:34.903487Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2026-02-19T08:27:34.903538Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2026-02-19T08:27:34.903620Z node 1 :HIVE DEBUG: hive_impl.cpp:929: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2026-02-19T08:27:34.903896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:12 2026-02-19T08:27:34.903912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2026-02-19T08:27:34.903966Z node 1 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2026-02-19T08:27:34.904002Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-02-19T08:27:34.904045Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2026-02-19T08:27:34.904095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:27:34.904681Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037899 2026-02-19T08:27:34.904910Z node 1 
:TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037899 2026-02-19T08:27:34.905544Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2026-02-19T08:27:37.147722Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490144142818286:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:37.147774Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:37.158248Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:37.232413Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:37.232777Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:37.235427Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490144142818257:2081] 1771489657146520 != 1771489657146523 2026-02-19T08:27:37.268590Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:37.268672Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:37.273949Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:37.448738Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:37.454445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:37.468895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:37.490106Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:38.153063Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:42.148221Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490144142818286:2069];send_to=[0:7307199536658146131:7762515]; 
2026-02-19T08:27:42.149770Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:27:46.938274Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-02-19T08:27:46.950246Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-02-19T08:27:46.951264Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2026-02-19T08:27:46.967811Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7608490182797530559:6104] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2026-02-19T08:27:40.713440Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490157684842833:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:40.714530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:40.929345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:40.948152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:40.948246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:41.010352Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:41.013327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:41.121562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:41.223288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:41.229262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:41.237528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:41.293812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:41.723737Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:44.193336Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490173717347403:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:44.195046Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:44.267004Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:44.285300Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:44.313338Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:44.313409Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:44.317691Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:44.497431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:44.518138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:44.552630Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks |94.2%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2026-02-19T08:27:41.291849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490163010935991:2223];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:41.291925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:41.514044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:41.538405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:41.538491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:41.614017Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490163010935797:2081] 1771489661276364 != 1771489661276367 2026-02-19T08:27:41.629235Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:41.633095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:41.722518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:41.730918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:41.730946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:41.730960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:41.731076Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:41.917440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:41.929513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:41.940166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:27:42.001653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:42.294337Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:44.970083Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490173178540675:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:44.970146Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:44.993661Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:45.073844Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490173178540645:2081] 1771489664969239 != 1771489664969242 2026-02-19T08:27:45.083301Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:45.094981Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:45.095053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:45.100092Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:45.171152Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:45.171184Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:45.171191Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:45.171285Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:45.198466Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:45.386087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:45.409262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.922104Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553163, Sender [2:7608490177473509484:2475], Recipient 
[2:7608490177473508831:2303]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2026-02-19T08:27:45.922155Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3232: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2026-02-19T08:27:45.922380Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-02-19T08:27:45.922605Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2026-02-19T08:27:45.922658Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2026-02-19T08:27:45.922686Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2026-02-19T08:27:45.922715Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2026-02-19T08:27:45.922742Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2026-02-19T08:27:45.922829Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2026-02-19T08:27:45.936376Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553163, Sender [2:7608490177473509488:2476], Recipient [2:7608490177473508831:2303]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2026-02-19T08:27:45.936409Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3232: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2026-02-19T08:27:45.936568Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: 
"" contents: 0 common prefixes: 0 2026-02-19T08:27:45.936803Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2026-02-19T08:27:45.936843Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2026-02-19T08:27:45.936914Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 2026-02-19T08:27:45.975562Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpBatchUpdate::ManyPartitions_3 [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] >> THealthCheckTest::UnknowPDiskState [GOOD] >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse |94.2%| [TA] $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock >> TFlatTest::SelectRangeReverseItemsLimit |94.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/runtime/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::TestOnlyRequestNeededTablets [GOOD] >> THealthCheckTest::TestNodeDisconnected >> TLocksTest::Range_IncorrectNullDot1 >> TLocksTest::GoodDupLock >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> TFlatTest::Mix_DML_DDL >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> TObjectStorageListingTest::Listing >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TObjectStorageListingTest::MaxKeysAndSharding >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_3 [GOOD] Test command err: 2026-02-19T08:24:33.084424Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489354300689272:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:33.086049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:33.392184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:33.392293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:33.415472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:33.437009Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489354300689228:2081] 1771489473082100 != 1771489473082103 2026-02-19T08:24:33.456530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:33.459855Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:33.594790Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:33.601954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:33.601974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:33.601980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:33.602072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:36.252159Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006b4/r3tmp/spilling-tmp-runner/node_1_941ba1de-289c2e98-84e8f2e-6d30f6d, actor: [1:7608489367185592956:2385] 2026-02-19T08:24:36.252476Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006b4/r3tmp/spilling-tmp-runner 2026-02-19T08:24:38.423194Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423254Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423284Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423309Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423333Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423372Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423428Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423470Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423504Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423541Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423579Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:38.423644Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01khtg0g9d13jgmexa52r8axn2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGMxMzg3MGYtMzY4ODAzOTItNDBiYzhiYi01YTNiYmZjZA==, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:38.424232Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489375775528627:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0g9d13jgmexa52r8axn2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGMxMzg3MGYtMzY4ODAzOTItNDBiYzhiYi01YTNiYmZjZA==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:38.424262Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489375775528627:2401] TxId: 281474976715672. Ctx: { TraceId: 01khtg0g9d13jgmexa52r8axn2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGMxMzg3MGYtMzY4ODAzOTItNDBiYzhiYi01YTNiYmZjZA==, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:38.424667Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715672. Resolved key sets: 11 2026-02-19T08:24:38.424827Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.424876Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.424916Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.424957Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.425023Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.425067Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.425110Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.425156Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.425221Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:38.425280Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups ... TimeUs: 558 BuildCpuTimeUs: 85 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489668973 CreateTimeMs: 1771489668944 UpdateTimeMs: 1771489668976 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:48.977617Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490190054692000:3706] 2026-02-19T08:27:48.977668Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [12:7608490190054691999:3705], CA [12:7608490190054691997:3703], CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:48.977722Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [12:7608490190054691999:3705], CA [12:7608490190054691997:3703], CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:48.978419Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490190054691999:3705] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 2643 DurationUs: 6000 Tasks { TaskId: 3 CpuTimeUs: 772 FinishTimeMs: 1771489668977 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 250 ReadBytes: 2000 AffectedPartitions: 25 } IngressRows: 250 ComputeCpuTimeUs: 669 BuildCpuTimeUs: 103 WaitInputTimeUs: 4363 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489668971 CreateTimeMs: 1771489668943 UpdateTimeMs: 1771489668977 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:48.978472Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490190054691999:3705] 2026-02-19T08:27:48.978514Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [12:7608490190054691997:3703], CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:48.978557Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [12:7608490190054691997:3703], CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:49.029337Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490190054691997:3703] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 1909 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 1521 FinishTimeMs: 1771489669029 IngressRows: 250 ComputeCpuTimeUs: 572 BuildCpuTimeUs: 949 WaitOutputTimeUs: 81 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489669028 CreateTimeMs: 1771489668942 UpdateTimeMs: 1771489669029 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:49.029416Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [12:7608490190054691997:3703], CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:49.029460Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [12:7608490190054691997:3703], CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:49.030109Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490190054692001:3707] TaskId# 5 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 4656 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 413 FinishTimeMs: 1771489669029 InputBytes: 4 ComputeCpuTimeUs: 283 BuildCpuTimeUs: 130 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489669029 CreateTimeMs: 1771489668944 UpdateTimeMs: 1771489669029 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:49.030170Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [12:7608490190054691997:3703], CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:49.030207Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [12:7608490190054691997:3703], CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:49.030370Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490190054691997:3703] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 63240 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 1525 FinishTimeMs: 1771489669029 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 250 ReadBytes: 2000 AffectedPartitions: 25 } IngressRows: 250 ComputeCpuTimeUs: 576 BuildCpuTimeUs: 949 WaitOutputTimeUs: 81 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489669028 CreateTimeMs: 1771489668942 UpdateTimeMs: 1771489669029 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:49.030437Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490190054691997:3703] 2026-02-19T08:27:49.030489Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. 
Waiting for: CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:49.030526Z node 12 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [12:7608490190054692001:3707], trace_id# 2026-02-19T08:27:49.030672Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [12:7608490190054692001:3707] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 66863 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 419 FinishTimeMs: 1771489669029 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 289 BuildCpuTimeUs: 130 HostName: "ghrun-fkrokuvqke" NodeId: 12 StartTimeMs: 1771489669029 CreateTimeMs: 1771489668944 UpdateTimeMs: 1771489669029 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:27:49.030715Z node 12 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [12:7608490190054692001:3707] 2026-02-19T08:27:49.030870Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:27:49.030927Z node 12 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:27:49.030969Z node 12 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [12:7608490190054691986:2545] TxId: 281474976711075. Ctx: { TraceId: 01khtg6b0j4rwh4epvhce0bk5h, Database: /Root, SessionId: ydb://session/3?node_id=12&id=YjUxM2NlODUtOTQxYmRiM2ItOWM3YTZhMTUtYmUxNjcyMmU=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.137723s ReadRows: 999 ReadBytes: 7992 RequestUnits# 999 ForceFlag# true trace_id# >> TFlatTest::MergeEmptyAndWrite [GOOD] >> THealthCheckTest::TestNodeDisconnected [GOOD] >> THealthCheckTest::TestStateStorageOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2026-02-19T08:27:52.394631Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490210209503412:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:52.394716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:52.681846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.705866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:52.705991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:52.804238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:52.806347Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:52.807439Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490210209503284:2081] 1771489672375316 != 1771489672375319 2026-02-19T08:27:52.869792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.056096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:53.063376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:53.097295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.271866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2026-02-19T08:27:53.294726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:27:53.316205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:27:53.341855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 2026-02-19T08:27:53.398758Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse [GOOD] >> THealthCheckTest::TestStateStorageYellow >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> TLocksTest::MultipleLocks [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2026-02-19T08:27:49.355210Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490193903760684:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:49.356859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:49.613804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:49.639578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:49.639687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:49.710795Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490193903760652:2081] 1771489669353099 != 1771489669353102 2026-02-19T08:27:49.723562Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:49.724994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:49.868488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:49.922979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:49.986827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:52.981965Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490209340250973:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:53.066457Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:53.125796Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.164427Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:53.175070Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:53.175143Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:53.202521Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:53.315131Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.437509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:53.442582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:53.459816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.547815Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:27:53.553838Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:27:53.577830Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:27:53.604592Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-02-19T08:27:53.605052Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:210: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-02-19T08:27:53.605079Z node 2 :TX_DATASHARD DEBUG: 
datashard__compaction.cpp:247: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2026-02-19T08:27:53.610915Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:53.612889Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.613060Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:53.614355Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.614487Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:53.615050Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037888 restored its data 2026-02-19T08:27:53.615833Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.616211Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:53.616673Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037889 restored its data 2026-02-19T08:27:53.617341Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.617441Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:53.617868Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037888 restored its data 2026-02-19T08:27:53.618536Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.618623Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:53.618983Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037889 restored its data 2026-02-19T08:27:53.619633Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.620074Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:53.620444Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037889 restored its data 2026-02-19T08:27:53.621058Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.621150Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:53.621521Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037888 restored its data 2026-02-19T08:27:53.622160Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.622559Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:53.622906Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037889 restored its 
data 2026-02-19T08:27:53.623499Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.623582Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:53.623934Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037888 restored its data 2026-02-19T08:27:53.624527Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.624609Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:53.625006Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037889 restored its data 2026-02-19T08:27:53.625628Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.625998Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:53.626357Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037888 restored its data 2026-02-19T08:27:53.626988Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976710676 released its data 2026-02-19T08:27:53.627072Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:27:53.627461Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037889 restored its data 2026-02-19T08:27:53.628055Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:27:53.628078Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:27:53.628224Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:27:53.628578Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976710676 at 72075186224037888 restored its data 2026-02-19T08:27:53.629607Z node 2 : ... 
ify to schemeshard 72057594046644480 txId 281474976710687 state PreOffline TxInFly 0 2026-02-19T08:27:53.996401Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:27:53.996452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1771489674038 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 809 } } CommitVersion { Step: 1771489674038 TxId: 281474976710687 } 2026-02-19T08:27:53.996465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 281474976710687, tablet: 72075186224037890, partId: 0 2026-02-19T08:27:53.996561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1771489674038 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 809 } } CommitVersion { Step: 1771489674038 TxId: 281474976710687 } 2026-02-19T08:27:53.996646Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1771489674038 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 809 } } CommitVersion { Step: 1771489674038 TxId: 281474976710687 } debug: NTableState::TProposedWaitParts operationId# 281474976710687:0 2026-02-19T08:27:53.997235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2026-02-19T08:27:53.997395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490213635219339 RawX2: 4503608217307437 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2026-02-19T08:27:53.997416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 281474976710687, tablet: 72075186224037890, partId: 0 2026-02-19T08:27:53.997504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490213635219339 RawX2: 4503608217307437 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2026-02-19T08:27:53.997534Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976710687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2026-02-19T08:27:53.997604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 281474976710687:0 HandleReply 
TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7608490213635219339 RawX2: 4503608217307437 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2026-02-19T08:27:53.997640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710687:0, shardIdx: 72057594046644480:3, shard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2026-02-19T08:27:53.997656Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2026-02-19T08:27:53.997669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 281474976710687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-02-19T08:27:53.997688Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710687:0 ProposedWaitParts -> Done 2026-02-19T08:27:53.998016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2026-02-19T08:27:53.998113Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976710687 datashard 72075186224037890 state PreOffline 2026-02-19T08:27:53.998130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2026-02-19T08:27:53.998153Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-02-19T08:27:53.998157Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:416: TDropTable TProposedDeletePart operationId: 281474976710687:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:27:53.998478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:27:53.998600Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710687:0 progress is 1/1 2026-02-19T08:27:53.998610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2026-02-19T08:27:53.998626Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710687:0 progress is 1/1 2026-02-19T08:27:53.998656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2026-02-19T08:27:53.998680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710687, ready parts: 1/1, is published: true 2026-02-19T08:27:53.998715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7608490213635219561:2385] message: TxId: 281474976710687 2026-02-19T08:27:53.998735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2026-02-19T08:27:53.998748Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710687:0 2026-02-19T08:27:53.998757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710687:0 2026-02-19T08:27:53.998829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-02-19T08:27:53.999635Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:27:53.999689Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-02-19T08:27:54.001247Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:27:54.001592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490213635219339 RawX2: 4503608217307437 } TabletId: 72075186224037890 State: 4 2026-02-19T08:27:54.001628Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:27:54.001889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:27:54.001923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:27:54.001927Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-02-19T08:27:54.004602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:27:54.004853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:27:54.005100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:27:54.005159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-02-19T08:27:54.005224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:27:54.005689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:27:54.005742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:27:54.005782Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:27:54.006792Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-02-19T08:27:54.006844Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7608490213635219449:2772], serverId# [2:7608490213635219450:2773], sessionId# [0:0:0] 2026-02-19T08:27:54.006923Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7608490213635219568:2854], serverId# [2:7608490213635219569:2855], sessionId# [0:0:0] 2026-02-19T08:27:54.007023Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-02-19T08:27:54.007296Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-02-19T08:27:54.007388Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] >> TLocksTest::BrokenSameKeyLock >> THealthCheckTest::ShardsNoLimit [GOOD] >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] >> THealthCheckTest::LayoutIncorrect [GOOD] >> THealthCheckTest::LayoutCorrect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2026-02-19T08:27:30.570771Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490115758183902:2192];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:30.571067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:30.775594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:30.793083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:30.793171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:30.864912Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:30.873261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:31.063560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:31.083830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:31.089642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:31.155557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:31.272136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:31.317810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:34.291479Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490133173389362:2260];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:34.291520Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:34.327350Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:34.432212Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490133173389139:2081] 1771489654266456 != 1771489654266459 2026-02-19T08:27:34.444747Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:34.454683Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:34.454795Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:34.459631Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:34.512293Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:34.641842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:34.661883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:34.721976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:34.769655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:37.874609Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:37.875641Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:37.962726Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:37.963968Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490143631929282:2081] 1771489657837140 != 1771489657837143 2026-02-19T08:27:37.987415Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:37.987509Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:38.002762Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:38.063899Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:38.211122Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:38.222226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:38.231527Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:38.235962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:38.280894Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:38.337614Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:41.611925Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490162247406545:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:41.611964Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:41.637795Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:41.711160Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:41.712767Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490162247406513:2081] 1771489661610452 != 1771489661610455 2026-02-19T08:27:41.739623Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:41.739708Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:41.751193Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:41.850680Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:41.931657Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:41.936809Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:41.948311Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:41.997461Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:42.048718Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.465179Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:45.465325Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:45.476944Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:45.479141Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608490178521238320:2081] 1771489665340596 != 1771489665340599 2026-02-19T08:27:45.502143Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:45.502241Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:45.517200Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:45.701823Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:45.739329Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:45.744810Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:45.756003Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.817127Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.862916Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:49.562551Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7608490197007014552:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:49.566785Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:49.585139Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:49.665469Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:49.667155Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608490197007014488:2081] 1771489669558826 != 1771489669558829 2026-02-19T08:27:49.686127Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:49.686236Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:49.705401Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:49.885131Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:49.921607Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:49.927653Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:49.944169Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:50.013020Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:50.076629Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.630750Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490212659626972:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:53.630830Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:53.659948Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.752779Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:53.756450Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490212659626929:2081] 1771489673623743 != 1771489673623746 2026-02-19T08:27:53.782740Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:53.782834Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:53.794768Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:53.945962Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:54.008116Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:54.013506Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:54.032268Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:54.096853Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:54.152545Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> 
THealthCheckTest::NoStoragePools >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:27:05.995858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:06.080385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:06.086372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:06.086724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:06.086905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:06.456207Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:06.548944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:06.549090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:06.615067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:06.755968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:07.599281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:07.599342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:07.599375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:07.599998Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:27:11.686237Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:11.692216Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:11.694999Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:353:2230], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:11.695439Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:11.695604Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:12.006410Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:12.079684Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:12.079822Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:12.114614Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:12.171889Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:12.973830Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:12.973898Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:12.973939Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:12.974491Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:27:16.958301Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:16.962436Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:16.964316Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:455:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:16.964533Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:16.964639Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:17.270063Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:17.363732Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:17.363868Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:17.398128Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:17.466778Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:18.300740Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:18.300801Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:18.300838Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:18.301383Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:27:25.727046Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:25.727814Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:25.729994Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:696:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:25.738648Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:25.741043Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:25.742582Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:309:2231], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:25.742927Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:25.743104Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:25.744850Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:25.744939Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:26.102541Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:26.215069Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:26.215223Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:26.216069Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:26.216158Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:26.261728Z node 8 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2026-02-19T08:27:26.262959Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:26.263436Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:26.329542Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:26.369820Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:27.237915Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:27.237987Z node 8 :NET_CLASSIFIER WARN: net_classifier ... 
resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:35.909419Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:35.909505Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:36.262371Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:36.364111Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:36.364255Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:36.364702Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:36.364783Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:36.424402Z node 10 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2026-02-19T08:27:36.425611Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.426028Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.485037Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:36.500117Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:37.334621Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:37.334662Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:37.334682Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:37.334992Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:27:45.830868Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:45.831962Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:45.832346Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:45.843030Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:45.844671Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:45.846715Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:45.847271Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:45.847431Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:45.849216Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:45.849279Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:46.173193Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:46.289219Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:46.289364Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:46.289857Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:46.289941Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:46.349777Z node 12 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2026-02-19T08:27:46.350315Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:46.350648Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:46.437969Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:46.451368Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:47.287582Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:47.287635Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:47.287664Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:47.288060Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:55.984561Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:55.987550Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:691:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:56.000756Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:56.002965Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:56.004693Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:696:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:56.004945Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:27:56.005472Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:56.005543Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:56.007493Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:56.007564Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:56.401250Z node 14 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:56.531544Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:56.531688Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:56.532177Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:56.532270Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:56.584777Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-02-19T08:27:56.585610Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:56.586022Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:56.667475Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:56.753089Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:57.506810Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:57.506861Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:57.506883Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:57.507163Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2026-02-19T08:27:52.015702Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490207390424719:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:52.016000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:52.249113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.269599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:52.269682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:52.345911Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490203095457376:2081] 1771489672013078 != 1771489672013081 2026-02-19T08:27:52.349408Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:52.355723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:52.505446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.555921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:52.646622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.046742Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:55.347573Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490223352509630:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:55.350088Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:55.371219Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:55.448800Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:55.488221Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:55.488340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:55.495900Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:55.541667Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:55.704270Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:55.710295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:55.726356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] Test command err: 2026-02-19T08:27:03.726857Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489996219686627:2171];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:03.727408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:03.910878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:27:03.941104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:03.941237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:04.007346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:04.042499Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:04.045976Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489996219686484:2081] 1771489623718102 != 1771489623718105 2026-02-19T08:27:04.092407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:04.092441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:04.092447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:04.092578Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:04.105837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:04.260052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:07.066530Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490013845854126:2183];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:07.066756Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:07.089148Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:27:07.165218Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:07.169582Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490013845853981:2081] 1771489627057941 != 1771489627057944 2026-02-19T08:27:07.199752Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:07.199824Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:07.204445Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:07.261725Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:07.261747Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:07.261754Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-02-19T08:27:07.261853Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:07.382750Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:07.516004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:07.522621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 ... waiting for SysViewsRoster update finished 2026-02-19T08:27:16.376239Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:16.377188Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:16.377927Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:16.388472Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:16.390314Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:16.392396Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:16.393097Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:16.393234Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:16.394795Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:16.394950Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:16.697364Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:16.816514Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:16.816678Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:16.818069Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:16.818163Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:16.876021Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:27:16.876724Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:16.877346Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:16.962472Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:16.977111Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:17.886890Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:17.886965Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:17.887004Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:17.887439Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:25.654754Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:25.655015Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:25.666668Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:25.668116Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:25.670538Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:506:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:25.671009Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:25.671183Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:25.671904Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:499:2170], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:25.672177Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/wor ... 8:27:45.237701Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:45.238071Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:45.240029Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:45.240131Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:45.642361Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:45.757882Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:45.758060Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:45.758957Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:45.759050Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:45.807981Z node 9 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-02-19T08:27:45.809061Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:45.809536Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:45.866706Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:45.906222Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:46.908326Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:46.908416Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:46.908465Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:46.909350Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: MAINTENANCE_REQUIRED issue_log { id: "ORANGE-af30-1231c6b1" status: ORANGE message: "Storage has no redundancy" location { database { name: "/Root" } } reason: "ORANGE-cf29-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "ORANGE-a3e2-1231c6b1-2147483648" status: ORANGE message: "Group dead in some piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" type: "STORAGE_GROUP" level: 4 } issue_log { id: "ORANGE-cf29-1231c6b1-f7549920" status: ORANGE message: "Pool has no redundancy" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "ORANGE-a3e2-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 9 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "ORANGE-5cc9-1231c6b1" status: ORANGE message: "Database has storage issues" location { database { name: "/Root" } } reason: "ORANGE-af30-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 pile { name: "pile0" } } ... waiting for SysViewsRoster update finished 2026-02-19T08:27:56.159712Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:56.160813Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:56.178589Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:56.181569Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:56.183198Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:701:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:56.183563Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:56.183915Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:56.185677Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:696:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:56.186016Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:56.186314Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:56.581719Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:56.701197Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:56.701370Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:56.702221Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:56.702327Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:56.749671Z node 11 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-02-19T08:27:56.750769Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:56.751351Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:56.826683Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:56.864497Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:57.814491Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:57.814574Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:57.814624Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:57.815407Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-2f2e-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-40f1-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "RED-6100-1231c6b1-2147483648" status: RED message: "Group dead in all piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" reason: "RED-1a83-1231c6b1-2147483650" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-40f1-1231c6b1-f7549920" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-6100-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483650" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483650" pile { name: "2" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483650-3-58-0-58" id: "2147483650-3-59-0-59" id: "2147483650-3-60-0-60" } pile { name: "2" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-5cc9-1231c6b1" status: RED message: "Database has storage issues" location { database { name: "/Root" } } reason: "RED-2f2e-1231c6b1" type: "DATABASE" level: 1 } location { id: 11 host: "::1" port: 12001 pile { name: "pile0" } } |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> TLocksTest::CK_BrokenLock [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns >> 
TxUsage::Sinks_Oltp_WriteToTopics_4_Query [GOOD] >> AnalyzeColumnshard::AnalyzeStatus [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2026-02-19T08:27:20.161980Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490071680622284:2176];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:20.163966Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:20.377847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:20.387147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:20.387316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:20.445091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:20.450975Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490071680622138:2081] 1771489640153386 != 1771489640153389 2026-02-19T08:27:20.463591Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:20.579162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:20.704267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:20.710559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:20.787000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:20.903981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:20.953663Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:21.165575Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:23.507447Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490083081261111:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:23.507815Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:23.516217Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:23.612507Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:23.616379Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490083081261061:2081] 1771489643503864 != 1771489643503867 2026-02-19T08:27:23.625753Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:23.625834Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:23.638484Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:23.653627Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:23.847017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:23.853291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:23.867467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:23.928425Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:23.957388Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:23.971620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:26.964752Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490098266314224:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:26.964829Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:26.983529Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:27.052570Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:27.055721Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490098266314193:2081] 1771489646963490 != 1771489646963493 2026-02-19T08:27:27.094843Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:27.094931Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:27.099681Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:27.153112Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:27.285783Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:27.304765Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:27.358843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:27.409655Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:30.175491Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490114530291852:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:30.175560Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:30.204705Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:30.269331Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:30.281919Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490114530291743:2081] 1771489650165096 != 1771489650165099 2026-02-19T08:27:30.310137Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:30.310217Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:30.323793Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:30.486489Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:18 ... 
ation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:43.119646Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:43.127452Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:43.133023Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:43.193632Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:43.250477Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:47.020397Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490187911295206:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:47.020492Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:47.031255Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:47.143984Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:47.144079Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:47.145780Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:47.149080Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:47.153898Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490187911295178:2081] 1771489667019274 != 1771489667019277 2026-02-19T08:27:47.184817Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:47.401817Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:47.445255Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:47.469641Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:47.533308Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:47.591745Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:51.316573Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608490204043686839:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:51.317626Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:51.336662Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:51.433510Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:51.433861Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:51.434861Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:51.437196Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490204043686779:2081] 1771489671310347 != 1771489671310350 2026-02-19T08:27:51.471517Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:51.505258Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:51.768476Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:51.773554Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:51.794585Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:51.863794Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:51.935030Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.224620Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490226054072755:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:56.224688Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:56.259683Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:56.357917Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:56.358076Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:56.358717Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:56.360292Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490226054072724:2081] 1771489676222418 != 1771489676222421 2026-02-19T08:27:56.403723Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:56.453805Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:56.745872Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:56.769150Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.834276Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.993497Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:57.241629Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] >> KqpKnn::FloatVectorKnnPushdown >> TFlatTest::LargeDatashardReplyRW [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> BasicUsage::AutoPartitioning_KeyedWriteSession_SmallMessages [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:27:10.071211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:10.072163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:10.072855Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:10.162809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:10.164037Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:10.174783Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:10.175203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:10.175460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:10.177343Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:10.177483Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:10.574666Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:10.689224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:10.689391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:10.690337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:10.690455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:10.736369Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:27:10.737241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:10.737787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:10.827941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:10.879026Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:11.681603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:11.681662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:11.681693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:11.682145Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:18.279356Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:18.280284Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:18.280647Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:18.290362Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:18.291955Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:18.293888Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:18.294594Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:18.294727Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:18.296169Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:18.296298Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:18.611990Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:18.713798Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:18.713955Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:18.714801Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:18.714878Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:18.773176Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:27:18.773727Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:18.774185Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:18.894020Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:18.917995Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:19.789991Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:19.790061Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:19.790092Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:19.790541Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:26.894319Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:26.895367Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:26.897548Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:26.908080Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:26.910438Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:26.912263Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:308:2231], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:26.912845Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:26.913009Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:26.914795Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:26.914945Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:27.226596Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:27.343347Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:27.343467Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:27.344040Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:27.344099Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 N ... nnected -> Connecting 2026-02-19T08:27:36.715425Z node 7 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2026-02-19T08:27:36.716718Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.717172Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.777085Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:36.805536Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:37.732132Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:37.732204Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:37.732247Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:37.732515Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 7 host: "::1" port: 12001 } ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:46.245497Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:46.246888Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:46.247944Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:46.263153Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:46.264059Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:46.264401Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:46.266518Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:46.266624Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:46.623858Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:46.730354Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:46.730498Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:46.731171Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:46.731239Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:46.780391Z node 9 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-02-19T08:27:46.781453Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:46.781963Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:46.812498Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:46.859694Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:47.788039Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:47.788110Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:47.788158Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:47.788559Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-02-19T08:27:53.223750Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:53.231654Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:53.234926Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:455:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:53.235219Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:53.235410Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:53.645545Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:53.742823Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:53.742996Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:53.778931Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:53.841057Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:54.800515Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:54.800603Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:54.800673Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:54.801438Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:28:00.118630Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:00.125589Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:00.128650Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:455:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:00.128913Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:00.129055Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:00.602986Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:00.712534Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:00.712707Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:00.776485Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:00.904742Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:02.007055Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:02.007146Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:02.007209Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:02.007807Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:45.856979Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:45.940345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:45.947909Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:45.948236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:45.948404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:46.272686Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.357627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.357764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.414699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.509319Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:47.159905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:47.160485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:47.160529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:47.160561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:47.161166Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:47.225920Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:47.771931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:50.294641Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:50.300269Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:50.303436Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:50.324933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:50.325013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:50.361396Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:50.363365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:50.510269Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:50.510380Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:50.511810Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.512421Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.513204Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.513996Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.514200Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.514607Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.514699Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.514789Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.514938Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:50.551248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:50.720488Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:50.756361Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:50.756448Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:50.780918Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:50.781269Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:50.781436Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:50.781478Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:50.781519Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:50.781555Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:50.781597Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:50.781643Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:50.782538Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:50.790568Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:50.790648Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2136:2593], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:50.806263Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2169:2620] 2026-02-19T08:25:50.807011Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2169:2620], schemeshard id = 72075186224037897 2026-02-19T08:25:50.877389Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2259:2667] 2026-02-19T08:25:50.879049Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:50.889191Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. Describe result: PathErrorUnknown 2026-02-19T08:25:50.889258Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. Creating table 2026-02-19T08:25:50.889356Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:50.896509Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2317:2695], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:50.900587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:50.908206Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:50.908320Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:50.964082Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:51.175244Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:51.193375Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:51.464618Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:51.581857Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:51.581935Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2266:2672] Owner: [2:2265:2671]. Column diff is empty, finishing 2026-02-19T08:25:52.125395Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4246:3783] 2026-02-19T08:26:20.631243Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4246:3783], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:20.631312Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:20.631436Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4245:3782], server id = [2:4246:3783], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:20.631522Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:20.631600Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4243:3780], StatRequests.size() = 1 2026-02-19T08:26:20.631836Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:27:14.433393Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4108:3705], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:27:14.433627Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4108:3705], Start read next stream part 2026-02-19T08:27:14.433842Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg3mx94berk1hgwts48ah4", SessionId: ydb://session/3?node_id=2&id=OTcxOWQwMDEtYmQxZDE0MC03NjQ2Yjc3NS04OTc1OGUwNg==, Slow query, duration: 54.484173s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:27:14.435055Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4355:3882], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:27:14.437518Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-02-19T08:27:14.437925Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4355:3882], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:27:14.438046Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4355:3882], Start read next stream part 2026-02-19T08:27:14.440850Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4108:3705], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:27:14.440924Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4108:3705], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjZhOTZjMjAtZjUxOTRhZjktY2MwYzc3NzAtYmY4YjRmZQ==, TxId: 2026-02-19T08:27:14.531522Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4380:3903]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:27:14.531812Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:27:14.531867Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:4380:3903], StatRequests.size() = 1 2026-02-19T08:28:01.412179Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4355:3882], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:28:01.412308Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4355:3882], Start read next stream part 2026-02-19T08:28:01.412507Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg5a451g6xq158wsp98je9", SessionId: ydb://session/3?node_id=2&id=Nzg0MjE0YzUtNWFiNDI2NTEtNzBmZDQ3NzAtOGM1MmI2M2I=, Slow query, duration: 46.970373s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:28:01.414637Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32200, txId: 18446744073709551615] shutting down 2026-02-19T08:28:01.414900Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4468:3986], ActorId: [2:4469:3987], Starting query actor #1 
[2:4470:3988] 2026-02-19T08:28:01.414968Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4469:3987], ActorId: [2:4470:3988], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:28:01.418330Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4355:3882], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:28:01.418394Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4098:3701], ActorId: [2:4355:3882], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzIwNWIxZmQtZjExNTM5OWMtYjI1NjZmOGEtYjg5ZjdlODc=, TxId: 2026-02-19T08:28:01.419186Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4469:3987], ActorId: [2:4470:3988], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Mjc2NmRlYmYtZmYxNDFkMjUtNTAwMWNmY2QtNWFjNGJmNjg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:28:01.502127Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4487:4002]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:01.502400Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:28:01.502441Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:4487:4002], StatRequests.size() = 1 2026-02-19T08:28:01.653857Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4469:3987], ActorId: [2:4470:3988], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Mjc2NmRlYmYtZmYxNDFkMjUtNTAwMWNmY2QtNWFjNGJmNjg=, TxId: 2026-02-19T08:28:01.653951Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4469:3987], ActorId: [2:4470:3988], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mjc2NmRlYmYtZmYxNDFkMjUtNTAwMWNmY2QtNWFjNGJmNjg=, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... waiting for TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:28:01.656095Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4503:4010] 2026-02-19T08:28:01.656786Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_IN_PROGRESS 2026-02-19T08:28:01.658205Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4505:4011]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 2
FastCheckInFlight: 1
FastSchemeShards: 0
FastNodes: 0
CurPropagationSeq: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 0
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 33], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 34], [OwnerId: 72075186224037897, LocalPathId: 33]
ForceTraversals: 1
    1970-01-01T00:00:04Z
NavigatePathId: 
ForceTraversalOperationId: "operationId"
  CreatedAt: 1970-01-01T00:00:04.037012Z
, ReplyToActorId: [1:1270:3040]
, RequestingActorReattached: 0
, Types: 1
, Tables size: 1
, Tables: 
    Table[0] PathId: [OwnerId: 72075186224037897, LocalPathId: 34]
        Status: AnalyzeStarted
        AnalyzedShards size: 0
        ColumnTags: 1,2
AnalyzeActorId: [2:4098:3701]
TraversalStartTime: 2026-02-19T08:26:19Z
TraversalDatabase: 
TraversalPathId: [OwnerId: 72075186224037897, LocalPathId: 34]
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
... unblocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... 2026-02-19T08:28:01.659628Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4468:3986], ActorId: [2:4469:3987], Got response [2:4470:3988] SUCCESS 2026-02-19T08:28:01.659848Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:28:01.691012Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:28:01.691100Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:1270:3040] 2026-02-19T08:28:01.692763Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4509:4014] 2026-02-19T08:28:01.693541Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpKnn::VectorSearchKnnPushdown+Nullable |94.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2026-02-19T08:27:43.257563Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490168701145195:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:43.257774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:43.504516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:43.504615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:43.569308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:43.586468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:43.587147Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490168701145079:2081] 1771489663240993 != 1771489663240996 2026-02-19T08:27:43.594276Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:43.819902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:43.820037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:43.826729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:43.835482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:43.907507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:44.258916Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:48.250906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490168701145195:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:48.250973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:27:52.612669Z node 1 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2026-02-19T08:27:52.631143Z node 1 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [1771489672148:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2026-02-19T08:27:52.634949Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7608490207355857411:6124] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2026-02-19T08:27:52.635053Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7608490207355857411:6124] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) proxy error code: ExecResultUnavailable 2026-02-19T08:27:53.488110Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490213060727827:2169];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:53.488218Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:53.504444Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:53.609197Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:53.611149Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490213060727688:2081] 1771489673479280 != 1771489673479283 2026-02-19T08:27:53.621101Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.623694Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:53.623784Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:53.671828Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:53.865480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:53.877910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:53.891736Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:53.896496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.902876Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:54.501830Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:58.485024Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490213060727827:2169];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:58.485199Z 
node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:03.206704Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976711361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-02-19T08:28:03.218669Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976711361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable 2026-02-19T08:28:03.221496Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976711361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2026-02-19T08:28:03.221637Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7608490251715439996:6111] txid# 281474976711361 RESPONSE Status# ExecResultUnavailable marker# P13c >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> THealthCheckTest::LayoutCorrect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> BasicUsage::AutoPartitioning_KeyedWriteSession_SmallMessages [GOOD] Test command err: 2026-02-19T08:22:40.569346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488866938481219:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.569431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:40.604666Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:22:40.774136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:40.794442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:40.794558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:40.872772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:40.906696Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:40.907758Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488866938481188:2081] 1771489360567572 != 1771489360567575 2026-02-19T08:22:40.993684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:41.042599Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f40/r3tmp/yandexw5PaVv.tmp 2026-02-19T08:22:41.042632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f40/r3tmp/yandexw5PaVv.tmp 2026-02-19T08:22:41.088650Z INFO: TTestServer started on Port 23521 GrpcPort 18698 2026-02-19T08:22:41.106161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f40/r3tmp/yandexw5PaVv.tmp 2026-02-19T08:22:41.106444Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:18698 2026-02-19T08:22:41.339271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.350783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:22:41.362068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:22:41.453894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:41.581619Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:41.582007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:22:43.752532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488879823384129:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.752625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488879823384137:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.752676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.753436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488879823384144:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.753489Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.756582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:43.768634Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488879823384143:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:22:43.831952Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488879823384210:2653] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:44.100711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.102644Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488879823384218:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:44.103071Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZjRmZWY3ZWYtZTkxMDBiZGMtODhjNTk2OWEtZjgyYjJkN2Y=, ActorId: [1:7608488879823384127:2335], ActorState: ExecuteState, LegacyTraceId: 01khtfx1s6fghzbseckk5f08q5, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:44.104739Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:22:44.136589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.238465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608488884118351795:2829] 2026-02-19T08:22:45.569828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488866938481219:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.569913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2026-02-19T08:22:49.554020Z :CreateTopicWithCustomName INFO: TTopicSdkTestSetup started 2026-02-19T08:22:49.578127Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:22:49.622678Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608488905593188463:2921] connected; active server actors: 1 2026-02-19T08:22:49.627913Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic-1771489369] updating configuration. Deleted partitions []. Added partitions [0] 2026-02-19T08:22:49.634930Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][test-topic-1771489369] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-02-19T08:22:49.635085Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186224037893][test-topic-1771489369] BALANCER INIT DONE for test-topic-1771489369: (0, 72075186224037892) 2026-02-19T08:22:49.635201Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:49.636382Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_000000000000000000 ... 24037892][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.745895Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.746111Z :INFO: [/Root] TraceId [] SessionId [autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0] PartitionId [3] Generation [1] Write session: close. Timeout 0.000000s 2026-02-19T08:28:02.745915Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][1][StateIdle] Try persist 2026-02-19T08:28:02.746136Z :INFO: [/Root] TraceId [] SessionId [autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0] PartitionId [3] Generation [1] Write session will now close 2026-02-19T08:28:02.746170Z :DEBUG: [/Root] TraceId [] SessionId [autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0] PartitionId [3] Generation [1] Write session: aborting 2026-02-19T08:28:02.746267Z :INFO: [/Root] TraceId [] SessionId [autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0] PartitionId [3] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:28:02.746293Z :DEBUG: [/Root] TraceId [] SessionId [autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0] PartitionId [3] Generation [1] Write session: destroy 2026-02-19T08:28:02.746354Z :DEBUG: [/Root] TraceId [] SessionId [autopartitioning_keyed_small_1_1|54141a66-92ba5e6d-4a6f505a-6a1790ca_0] PartitionId [1] Generation [1] Write session: destroy 2026-02-19T08:28:02.754071Z node 16 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: autopartitioning_keyed_small_1_2|353e55f8-a421a75b-a98308a-593fd497_0 grpc read done: success: 0 data: 2026-02-19T08:28:02.754106Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: autopartitioning_keyed_small_1_2|353e55f8-a421a75b-a98308a-593fd497_0 grpc read failed 2026-02-19T08:28:02.754145Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: autopartitioning_keyed_small_1_2|353e55f8-a421a75b-a98308a-593fd497_0 grpc closed 
2026-02-19T08:28:02.754163Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: autopartitioning_keyed_small_1_2|353e55f8-a421a75b-a98308a-593fd497_0 is DEAD 2026-02-19T08:28:02.755273Z node 16 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037897 (partition=2) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:28:02.755388Z node 16 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 6 sessionId: autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0 grpc read done: success: 0 data: 2026-02-19T08:28:02.755399Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 6 sessionId: autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0 grpc read failed 2026-02-19T08:28:02.755419Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 6 sessionId: autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0 grpc closed 2026-02-19T08:28:02.755434Z node 16 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 6 sessionId: autopartitioning_keyed_small_1_3|ddcedcf5-fe03e3d6-677da837-c339c9e5_0 is DEAD 2026-02-19T08:28:02.756008Z node 16 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037898 (partition=3) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:28:02.756146Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037897] server disconnected, pipe [16:7608490051242941704:2784] destroyed 2026-02-19T08:28:02.756196Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037897][Partition][2][StateIdle] TPartition::DropOwner. 2026-02-19T08:28:02.756245Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037897][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:28:02.756275Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037897][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.756300Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037897][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.756333Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037897][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.756355Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037897][Partition][2][StateIdle] Try persist 2026-02-19T08:28:02.756411Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037898] server disconnected, pipe [16:7608490051242941710:2787] destroyed 2026-02-19T08:28:02.756431Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037898][Partition][3][StateIdle] TPartition::DropOwner. 
2026-02-19T08:28:02.756448Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037898][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:28:02.756461Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037898][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.756472Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037898][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.756487Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037898][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.756499Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037898][Partition][3][StateIdle] Try persist 2026-02-19T08:28:02.841483Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037898][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:28:02.841550Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037898][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.841577Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037898][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.841612Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037898][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.841638Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037898][Partition][3][StateIdle] Try persist 2026-02-19T08:28:02.841727Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:28:02.841739Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.841750Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.841767Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.841777Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][1][StateIdle] Try persist 2026-02-19T08:28:02.841809Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:28:02.841820Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.841830Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.841843Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.841854Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:28:02.841883Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037897][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:28:02.841896Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037897][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.841906Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037897][Partition][2][StateIdle] 
Process user action and tx pending commits 2026-02-19T08:28:02.841920Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037897][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.841931Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037897][Partition][2][StateIdle] Try persist 2026-02-19T08:28:02.941531Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037898][Partition][3][StateIdle] Process user action and tx events 2026-02-19T08:28:02.941595Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037898][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.941622Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037898][Partition][3][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.941656Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037898][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.941684Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037898][Partition][3][StateIdle] Try persist 2026-02-19T08:28:02.941937Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:28:02.941966Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.941980Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.941999Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.942013Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][1][StateIdle] Try persist 2026-02-19T08:28:02.942047Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:28:02.942060Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.942071Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.942084Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.942095Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037893][Partition][0][StateIdle] Try persist 2026-02-19T08:28:02.942132Z node 16 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037897][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:28:02.942146Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037897][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.942157Z node 16 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037897][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:02.942170Z node 16 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037897][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:02.942180Z node 16 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037897][Partition][2][StateIdle] Try persist |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> THealthCheckTest::NoStoragePools [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:27:08.515181Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:08.516242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:08.516873Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:08.598788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:08.600139Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:08.606623Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:08.606881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:08.607150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:08.608895Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:08.608983Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:09.022232Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:09.140078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:09.140212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:09.140960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:09.141033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:09.185378Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:27:09.185935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:09.186223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:09.314758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:09.328316Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:10.157263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:10.157320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:10.157378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:10.157773Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:17.354123Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:17.354948Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:17.355298Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:17.364185Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:17.366049Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:17.367571Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:17.368167Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:17.368311Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:17.369632Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:17.369762Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:17.723568Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:17.835323Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:17.835458Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:17.836244Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:17.836325Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:17.895248Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:27:17.895839Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:17.896321Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:17.993181Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:18.019942Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:18.898724Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:18.898789Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:18.898830Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:18.899326Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:26.483666Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:26.484797Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:26.486946Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:26.498279Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:26.500693Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:26.502832Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:308:2231], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:26.503544Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:26.503719Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:26.505455Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:26.505603Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:26.836790Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:26.941011Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:26.941152Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:26.941976Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:26.942056Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 N ... eScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:36.179983Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:36.182199Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:623:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:36.182797Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:36.183197Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:36.184162Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:618:2283], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:36.184517Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:36.184782Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:36.566517Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:36.675849Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:36.676017Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:36.676751Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:36.676839Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:36.722200Z node 7 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2026-02-19T08:27:36.723373Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.723765Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.809079Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:36.839835Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:37.728953Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:37.729015Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:37.729044Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:37.729224Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:27:42.712301Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:42.718809Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:42.720724Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:459:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:42.720950Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:42.721077Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:43.047127Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:43.157883Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:43.158055Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:43.192818Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:43.219733Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:44.175268Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:44.175349Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:44.175399Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:44.175939Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:44.242413Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-02-19T08:27:56.541503Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:56.548919Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:56.551943Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:455:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:56.552233Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:56.552522Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:56.971157Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:57.075099Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:57.075263Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:57.111873Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:57.170693Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:58.108110Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:58.108186Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:58.108238Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:58.108753Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:28:03.547040Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:03.552396Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:03.554481Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:453:2409], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:03.554859Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:03.554922Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:03.947517Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:04.041635Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:04.041940Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:04.090602Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:04.141475Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:05.006626Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:05.006683Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:05.006716Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:05.007176Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] >> AnalyzeColumnshard::Analyze [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.616573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.693730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.698292Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.698533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.698648Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.014126Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.104667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.104798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.162699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.271925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:42.883148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:42.883518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:42.883552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:42.883579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:42.883975Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:42.945537Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.503041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:46.104221Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.109750Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:46.112364Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.137911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:46.138024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.186670Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:46.188570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.350170Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.350271Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.351541Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.352147Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.352718Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.353283Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.353489Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.353584Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.353767Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.353910Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.354050Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.368846Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.531697Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.574382Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:46.574460Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:46.609090Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:46.609362Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:46.609583Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:46.609641Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:46.609727Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:46.609798Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:46.609851Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:46.609903Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:46.610449Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:46.616215Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.616347Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.640201Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:46.641384Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:46.709821Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:46.712029Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:46.720797Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:46.720847Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:46.720943Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:46.726357Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:46.730856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:46.738015Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:46.738134Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:46.750072Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:46.954673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:46.993353Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:47.289031Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:47.408748Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:47.408835Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:48.007166Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 26-02-19T08:26:14.503482Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4227:3770], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:14.539067Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:4226:3769] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:14.701953Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4248:3783]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:14.702188Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:26:14.702247Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:4250:3785] 2026-02-19T08:26:14.702285Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:4250:3785] 2026-02-19T08:26:14.702582Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4251:3786] 2026-02-19T08:26:14.702686Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4251:3786], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:14.702741Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:14.702854Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4250:3785], server id = [2:4251:3786], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:14.702940Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:14.702991Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4248:3783], StatRequests.size() = 1 2026-02-19T08:26:14.703154Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: 
[72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 ... waiting for TEvKqpScan (done) 2026-02-19T08:27:16.079787Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2828:3030];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:997;event=tablet_die; 2026-02-19T08:27:16.100805Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [2:4339:3867]. Got EvDeliveryProblem, TabletId: 72075186224037899, NotDelivered: 0, Starting 2026-02-19T08:27:16.100926Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [2:4339:3867]. TKqpScanFetcherActor: broken pipe with tablet 72075186224037899, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2026-02-19T08:27:16.220404Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:4346:3872];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 2026-02-19T08:27:16.476243Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:27:16.476344Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR 2026-02-19T08:27:17.245778Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4113:3708], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:27:17.246062Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4113:3708], Start read next stream part 2026-02-19T08:27:17.246282Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg3f8a28rnwa3gr5yv040g", SessionId: ydb://session/3?node_id=2&id=NDE3MmI2MmQtNDk5YjFlYWQtYzE2NmFiZi1lY2FhYzk4Mw==, Slow query, duration: 63.086900s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:27:17.247907Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4478:3985], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:27:17.251850Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-02-19T08:27:17.252889Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4478:3985], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:27:17.253055Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4478:3985], Start read next stream part 2026-02-19T08:27:17.253335Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4113:3708], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:27:17.253405Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4113:3708], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDA5MGRmOGEtMWU3NGM4NDItNzM2OTNjNmYtYWJjNjZiODM=, TxId: 2026-02-19T08:27:17.366927Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4505:4008]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:27:17.367259Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:27:17.367326Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:4505:4008], StatRequests.size() = 1 2026-02-19T08:28:05.193654Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4478:3985], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:28:05.193830Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4478:3985], Start read next stream part 2026-02-19T08:28:05.194032Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg5cw4cjxkz0nb6yad13ef", SessionId: ydb://session/3?node_id=2&id=MzYyMjdlZDAtYzQ1ZWUwOS1kMjgwZWVlZi1jNDMzNzg3Yw==, Slow query, duration: 47.935036s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:28:05.196435Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4596:4093], ActorId: [2:4597:4094], Starting query actor #1 [2:4598:4095] 2026-02-19T08:28:05.196518Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4597:4094], ActorId: [2:4598:4095], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:28:05.197155Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32180, txId: 18446744073709551615] shutting down 2026-02-19T08:28:05.200687Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4597:4094], ActorId: [2:4598:4095], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MzFmYTRkOTYtZjkwODc5ZjAtMWYyMzFlZi1kZTYyMDc2ZA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:28:05.201571Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4478:3985], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:28:05.201625Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4103:3704], ActorId: [2:4478:3985], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTRmOGVlMTQtYTZlOTgzOTktNDM4NTM1ODQtZmY5OWFmOTA=, TxId: 2026-02-19T08:28:05.272736Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4615:4109]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:05.273105Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:28:05.273153Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:4615:4109], StatRequests.size() = 1 2026-02-19T08:28:05.425340Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4597:4094], ActorId: [2:4598:4095], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzFmYTRkOTYtZjkwODc5ZjAtMWYyMzFlZi1kZTYyMDc2ZA==, TxId: 2026-02-19T08:28:05.425437Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4597:4094], ActorId: [2:4598:4095], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzFmYTRkOTYtZjkwODc5ZjAtMWYyMzFlZi1kZTYyMDc2ZA==, TxId: 2026-02-19T08:28:05.425945Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4596:4093], ActorId: [2:4597:4094], Got response [2:4598:4095] SUCCESS 2026-02-19T08:28:05.426441Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:28:05.453094Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:28:05.453188Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3174:3442] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:09.569798Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:09.570872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:09.571678Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:09.693965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:09.695929Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:09.702125Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:09.702506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:09.702787Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:09.704715Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:09.704826Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:10.140227Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:10.247961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:10.248081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:10.248951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:10.249055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:10.293977Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:27:10.294771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:10.295230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:10.400780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:10.414177Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:11.233229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:11.233275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:11.233306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:11.233898Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:17.306178Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:17.307042Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:17.307386Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:17.316693Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:17.318384Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:17.320343Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:17.321075Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:17.321213Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:17.322685Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:17.322807Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:17.632204Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:17.727942Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:17.728057Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:17.728590Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:17.728657Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:17.787479Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:27:17.787847Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:17.788145Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:17.889441Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:17.917564Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:18.770085Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:18.770146Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:18.770198Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:18.770587Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:26.314174Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:26.314476Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:26.326492Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:26.328136Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:26.330752Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:506:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:26.331285Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:26.331487Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:26.332290Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:499:2170], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:26.332618Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:26.332891Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:26.672241Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:26.773606Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:26.773782Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:26.774265Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:26.774347Z node 5 :HIVE W ... R ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:46.147976Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:46.148094Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:46.552055Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:46.674549Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:46.674720Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:46.675553Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:46.675649Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:46.721165Z node 9 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-02-19T08:27:46.722181Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:46.722647Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:46.772461Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:46.825597Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:47.824757Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:47.824840Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:47.824888Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:47.825201Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "RED-a838-9-9-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-42" path: "/home/runner/.ya/build/build_root/ynks/004d26/r3tmp/tmpufjL2Z/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-43" path: "/home/runner/.ya/build/build_root/ynks/004d26/r3tmp/tmpufjL2Z/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-44" path: "/home/runner/.ya/build/build_root/ynks/004d26/r3tmp/tmpufjL2Z/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-02-19T08:27:56.920607Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:56.921816Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:56.938633Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:56.941655Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:56.943297Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:701:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:56.943582Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:56.943829Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:56.945168Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:696:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:56.945351Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:56.945524Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:57.344162Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:57.444121Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:57.444297Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:57.445196Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:57.445290Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:57.491038Z node 11 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-02-19T08:27:57.491739Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:57.492158Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:57.550724Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:57.565248Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:58.595883Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:58.595967Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:58.596016Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:58.596557Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:28:04.465436Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:04.473190Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:04.477214Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:455:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:04.477590Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:04.477820Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:04.871077Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:04.972651Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:04.972836Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:05.035957Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:05.165947Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:06.155009Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:06.155097Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:06.155149Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:06.155766Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2026-02-19T08:28:00.589101Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490241852479747:2144];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:00.590848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:00.836773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:00.836933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:00.871965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:00.888166Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:00.890542Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:00.893946Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490241852479641:2081] 1771489680579043 != 1771489680579046 2026-02-19T08:28:01.105346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:01.111452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:01.164190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:01.196839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:01.352607Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:28:01.359214Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:28:01.382275Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:28:01.390058Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-02-19T08:28:01.501732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:28:01.502116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:515: TCopyTable Propose, path: 
/dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2026-02-19T08:28:01.502567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-02-19T08:28:01.502633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-02-19T08:28:01.502647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:28:01.502665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976715676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:28:01.502696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-02-19T08:28:01.502712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-02-19T08:28:01.502822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-02-19T08:28:01.502924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:28:01.503572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:28:01.503616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-02-19T08:28:01.504197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-02-19T08:28:01.504369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-02-19T08:28:01.504558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:28:01.504577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:28:01.504700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-02-19T08:28:01.504809Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:28:01.504829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608490241852480180:2250], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 38 2026-02-19T08:28:01.504843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7608490241852480180:2250], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 40 2026-02-19T08:28:01.504869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2026-02-19T08:28:01.504908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-02-19T08:28:01.505160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:28:01.505305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-02-19T08:28:01.507422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-02-19T08:28:01.507488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2026-02-19T08:28:01.507538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976715676, partId: 0, tablet: 72057594037968897 2026-02-19T08:28:01.507556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:3, partId: 0 2026-02-19T08:28:01.507584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:4, partId: 0 2026-02-19T08:28:01.509619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-02-19T08:28:01.509657Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 281474976715676, shardIdx: 72057594046644480:3, partId: 0 2026-02-19T08:28:01.509769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: ... _loans.cpp:128: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2026-02-19T08:28:05.685632Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:28:05.685689Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2026-02-19T08:28:05.687923Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7608490263815219806:2682], serverId# [2:7608490263815219811:3635], sessionId# [0:0:0] 2026-02-19T08:28:05.687968Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:28:05.688006Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:28:05.688069Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:28:05.688113Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-02-19T08:28:05.690647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490259520250802 RawX2: 4503608217307378 } TabletId: 72075186224037889 State: 4 2026-02-19T08:28:05.690711Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:28:05.691142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:28:05.691192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:28:05.691708Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3403: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-02-19T08:28:05.691773Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-02-19T08:28:05.691938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490259520251089 RawX2: 4503608217307435 } TabletId: 72075186224037891 State: 4 2026-02-19T08:28:05.691970Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:28:05.692096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490259520251089 RawX2: 4503608217307435 } TabletId: 72075186224037891 State: 4 2026-02-19T08:28:05.692151Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:28:05.692296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490259520251088 RawX2: 4503608217307434 } TabletId: 72075186224037890 State: 4 2026-02-19T08:28:05.692313Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:28:05.692558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:28:05.692574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:28:05.692648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:28:05.692658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:28:05.692709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:28:05.692721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:28:05.693511Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:28:05.693561Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-02-19T08:28:05.693580Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2980: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-02-19T08:28:05.694648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:28:05.694873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:28:05.695035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:28:05.695150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-02-19T08:28:05.695242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:28:05.695350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:28:05.695485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-02-19T08:28:05.695574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:28:05.695588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-02-19T08:28:05.695630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:28:05.695652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-02-19T08:28:05.695668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:28:05.695881Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-02-19T08:28:05.695915Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-02-19T08:28:05.695945Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-02-19T08:28:05.698423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:2 2026-02-19T08:28:05.698446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:28:05.698522Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-02-19T08:28:05.698543Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-02-19T08:28:05.698559Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-02-19T08:28:05.699580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:28:05.699598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:28:05.699630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:28:05.699646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:28:05.699660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:28:05.699681Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:28:05.700568Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-02-19T08:28:05.700648Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037889 2026-02-19T08:28:05.702186Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-02-19T08:28:05.702238Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037891 2026-02-19T08:28:05.703932Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-02-19T08:28:05.703974Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037890 2026-02-19T08:28:05.985658Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2026-02-19T08:28:05.986932Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2026-02-19T08:28:05.987473Z node 2 :HIVE WARN: hive_impl.cpp:2010: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:48.136058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:48.227769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:48.234818Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:48.235099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:48.235240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:48.504708Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:48.584248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:48.584372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:48.641890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:48.765444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:49.404388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:49.404965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:49.405015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:49.405053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:49.405652Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:49.470645Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:50.012034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:52.429990Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:52.434672Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:52.436862Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:52.455712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:52.455791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:52.502792Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:52.504344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:52.675400Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:52.675478Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:52.676571Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.677052Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.677570Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.678156Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.678405Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.678468Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.678641Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.678751Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.678942Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:52.692821Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:52.851395Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:52.888481Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:52.888552Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:52.914735Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:52.914917Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:52.915064Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:52.915119Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:52.915174Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:52.915208Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:52.915248Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:52.915282Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:52.915619Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:52.919399Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:52.919493Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:52.938290Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:52.939109Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:52.990794Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:52.992753Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:53.000138Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:53.000193Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:53.000246Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:53.004171Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:53.006893Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:53.015934Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:53.016026Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:53.026772Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:53.215385Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:53.285323Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:53.565742Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:53.686009Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:53.686088Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:54.278380Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... und or you don't have access permissions } 2026-02-19T08:26:20.695201Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4149:3727], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:20.695325Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:20.699584Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4165:3731], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:20.701765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:20.750790Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4147:3725], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-02-19T08:26:21.020578Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4234:3773], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:21.046827Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:4233:3772] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:21.273399Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4255:3786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:21.273626Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:26:21.273698Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:4257:3788] 2026-02-19T08:26:21.273770Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:4257:3788] 2026-02-19T08:26:21.274065Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4258:3789] 2026-02-19T08:26:21.274170Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4257:3788], server id = [2:4258:3789], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:21.274213Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4258:3789], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:21.274272Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:21.274420Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:21.274507Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4255:3786], StatRequests.size() = 1 2026-02-19T08:26:21.274709Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:27:15.437132Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4120:3711], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:27:15.437418Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4120:3711], Start read next stream part 2026-02-19T08:27:15.437654Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg3nmefrrjtvrj4c6403ne", SessionId: ydb://session/3?node_id=2&id=ZDNkNGMxMzEtN2RkNTQwNzAtZmMyYzhkY2MtZjU3ZTJjNTU=, Slow query, duration: 54.746793s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:27:15.439194Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-02-19T08:27:15.439439Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: 
[2:4110:3707], ActorId: [2:4367:3888], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:27:15.443158Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4367:3888], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:27:15.443295Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4367:3888], Start read next stream part 2026-02-19T08:27:15.443458Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4120:3711], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:27:15.443522Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4120:3711], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzJlODRhOS05OTc0OTQ4Yy02MThmOGY3Yy00YjRjZDI2Yw==, TxId: 2026-02-19T08:27:15.555277Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4392:3909]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:27:15.555576Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:27:15.555628Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:4392:3909], StatRequests.size() = 1 2026-02-19T08:28:06.406233Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4367:3888], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:28:06.406379Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4367:3888], Start read next stream part 2026-02-19T08:28:06.406558Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg5b3kaswttz6ccxbxy2xb", SessionId: ydb://session/3?node_id=2&id=OTUwY2FkYTYtNmRlZWEzMWMtMjU3YmM2NTYtOTM5OGNlNGY=, Slow query, duration: 50.958271s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:28:06.408773Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-02-19T08:28:06.409052Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4480:3992], ActorId: [2:4481:3993], Starting query actor #1 [2:4482:3994] 2026-02-19T08:28:06.409117Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4481:3993], ActorId: [2:4482:3994], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:28:06.412701Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4481:3993], ActorId: [2:4482:3994], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OGVmZmFkYjgtNzQ0YjNlMDktNDM2NThhNGUtYmI0NDljMDU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:28:06.413292Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4367:3888], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:28:06.413353Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4110:3707], ActorId: [2:4367:3888], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTVhMTM1YmItZjgwMTRhODEtMWEyNDE3Ny01NTUwMzY3Zg==, TxId: 2026-02-19T08:28:06.484587Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4499:4008]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:06.484931Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:28:06.484985Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:4499:4008], StatRequests.size() = 1 2026-02-19T08:28:06.667272Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4481:3993], ActorId: [2:4482:3994], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGVmZmFkYjgtNzQ0YjNlMDktNDM2NThhNGUtYmI0NDljMDU=, TxId: 2026-02-19T08:28:06.667395Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4481:3993], ActorId: [2:4482:3994], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGVmZmFkYjgtNzQ0YjNlMDktNDM2NThhNGUtYmI0NDljMDU=, TxId: 2026-02-19T08:28:06.667879Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4480:3992], ActorId: [2:4481:3993], Got response [2:4482:3994] SUCCESS 2026-02-19T08:28:06.669206Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:28:06.695549Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:28:06.695649Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3180:3445] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock >> AnalyzeColumnshard::AnalyzeServerless [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> KqpKnn::Int8VectorKnnPushdown |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdownFollower-StaleRO |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> TLocksTest::BrokenNullLock [GOOD] >> KqpKnn::VectorSearchKnnPushdownFollower+StaleRO |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.885781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.992385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.997913Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.998259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.998421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.364996Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.452350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.452492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.509887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.627785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.230668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.231121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.231159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.231189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.231703Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.295562Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.812096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:46.445291Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.453087Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:46.456694Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.480615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:46.480696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.528443Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:46.530157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.687216Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.687292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.688273Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.688767Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.689285Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.689867Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.690083Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.690196Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.690338Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.690420Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.690542Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.704450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.895746Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.946053Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:46.946134Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:46.980680Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:46.980849Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:46.980999Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:46.981048Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:46.981112Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:46.981149Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:46.981186Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:46.981223Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:46.981582Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:46.985530Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.985616Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.005886Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2180:2627] 2026-02-19T08:25:47.006569Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2180:2627], schemeshard id = 72075186224037897 2026-02-19T08:25:47.066884Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2265:2669] 2026-02-19T08:25:47.068615Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-02-19T08:25:47.078798Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.078848Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:25:47.078918Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-02-19T08:25:47.085519Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.089795Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.102135Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.102284Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.161892Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:47.340354Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:47.357670Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-02-19T08:25:47.640858Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:47.749804Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:47.749887Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:25:48.452940Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 4.727000s, at schemeshard: 72075186224037899 2026-02-19T08:26:22.376759Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-02-19T08:26:22.391331Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-02-19T08:26:22.491026Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:26:22.491292Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:697: [72075186224037894] ScheduleNextAnalyze. operationId: "operationId", started analyzing table: [OwnerId: 72075186224037899, LocalPathId: 32], AnalyzeActorId: [2:4876:4140] 2026-02-19T08:26:22.496290Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:4886:4144], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:26:22.500842Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:4886:4144], RunStreamQuery with text: SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table` 2026-02-19T08:26:22.501078Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:4886:4144], Start read next stream part 2026-02-19T08:26:22.779424Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4913:4158]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:22.779772Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:26:22.780015Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-02-19T08:26:22.780090Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:4916:4161] 2026-02-19T08:26:22.780166Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:4916:4161] 2026-02-19T08:26:22.780677Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4917:4162] 2026-02-19T08:26:22.780816Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4916:4161], server id = [2:4917:4162], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:22.780933Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4917:4162], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:22.781018Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:22.781173Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:22.781266Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4913:4158], StatRequests.size() = 1 2026-02-19T08:26:22.781421Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:27:22.021900Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:4886:4144], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:27:22.022157Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:4886:4144], Start read next stream part 2026-02-19T08:27:22.022352Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg3qd47sghb4mdf09n1hz6", SessionId: ydb://session/3?node_id=2&id=YWQ1ZDFhYjQtMjFhN2Y1YmEtMjJlMzgzZTctNzI1ZTQ5MjM=, Slow query, duration: 59.511481s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:27:22.023374Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:5027:4262], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:27:22.024183Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-02-19T08:27:22.027186Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:4886:4144], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:27:22.027263Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:4886:4144], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2QxZGVmNGItZmNhZjVmOGYtYjU0NzNkZTQtZjM1ZmM4ZGI=, TxId: 2026-02-19T08:27:22.028422Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:5027:4262], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:27:22.028546Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:5027:4262], Start read next stream part 2026-02-19T08:27:22.130340Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5046:4278]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:27:22.130673Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:27:22.130735Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:5046:4278], StatRequests.size() = 1 2026-02-19T08:28:09.467194Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:5027:4262], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:28:09.467327Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:5027:4262], Start read next stream part 2026-02-19T08:28:09.467514Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg5hhcbyyc1w4wsabgqzpa", SessionId: ydb://session/3?node_id=2&id=NjY3OTUzYWEtNjQyOTJiZi04ODI1MDkwNi0yOWJkZjBlMw==, Slow query, duration: 47.435534s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:28:09.470031Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33590, txId: 18446744073709551615] shutting down 2026-02-19T08:28:09.470314Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5137:4363], ActorId: [2:5138:4364], Starting query actor #1 
[2:5139:4365] 2026-02-19T08:28:09.470390Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5138:4364], ActorId: [2:5139:4365], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-02-19T08:28:09.473168Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:5027:4262], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:28:09.473225Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4876:4140], ActorId: [2:5027:4262], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWU5M2EwZGYtOTBmOTE0Mi03NWZmYThhZS04M2ExNGU5Mw==, TxId: 2026-02-19T08:28:09.474420Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5138:4364], ActorId: [2:5139:4365], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=M2E0ZDQ0MjUtYWZhNjNlNjAtYzNmNWU5ZTctOGNkYTBhYjk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:28:09.542288Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5155:4379]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:09.542560Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:28:09.542823Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2026-02-19T08:28:09.542875Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:28:09.543003Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:28:09.543069Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:5155:4379], StatRequests.size() = 1 2026-02-19T08:28:09.543135Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:28:09.726685Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5138:4364], ActorId: [2:5139:4365], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2E0ZDQ0MjUtYWZhNjNlNjAtYzNmNWU5ZTctOGNkYTBhYjk=, TxId: 2026-02-19T08:28:09.726799Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5138:4364], ActorId: [2:5139:4365], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2E0ZDQ0MjUtYWZhNjNlNjAtYzNmNWU5ZTctOGNkYTBhYjk=, TxId: 2026-02-19T08:28:09.727358Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5137:4363], ActorId: [2:5138:4364], Got response [2:5139:4365] SUCCESS 2026-02-19T08:28:09.728387Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:28:09.747717Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, 
LocalPathId: 32] 2026-02-19T08:28:09.747829Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3873:3654] >> TExternalTableTest::CreateExternalTable >> TExternalTableTest::DropExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2026-02-19T08:27:43.940546Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490171912418296:2151];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:43.940765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:44.185332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:44.195774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:44.195843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:44.270039Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490171912418173:2081] 1771489663927067 != 1771489663927070 2026-02-19T08:27:44.281599Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:44.286433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:44.370327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:44.492569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:44.497613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:44.571674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:44.718701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:44.767553Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:44.955749Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:47.661028Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490187802019468:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:47.661100Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:47.685840Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:47.750272Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:47.753672Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490187802019424:2081] 1771489667659443 != 1771489667659446 2026-02-19T08:27:47.800631Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:47.800717Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:47.803739Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:47.853192Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:47.972234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:47.978457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:47.986682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:47.991270Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:48.057804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:48.101106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:51.144543Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490202681647437:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:51.144594Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:51.159696Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:51.238086Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:51.240047Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490202681647404:2081] 1771489671143772 != 1771489671143775 2026-02-19T08:27:51.272234Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:51.272310Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:51.275943Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:51.366694Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:51.469590Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:51.474704Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:51.490315Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:51.540792Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:27:51.603003Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:54.899734Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:54.929721Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:54.980136Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:55.032275Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:55.034049Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490216622026843:2081] 1771489674866081 != 1771489674866084 2026-02-19T08:27:55.044601Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:55.044713Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:55.073667Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#720575 ... 
ing -> Connected 2026-02-19T08:27:55.256670Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:55.283854Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:55.304607Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:55.363609Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:55.409987Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:58.731244Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608490233757909163:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:58.731296Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:58.750494Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:58.837367Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:58.838704Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608490233757909133:2081] 1771489678729599 != 1771489678729602 2026-02-19T08:27:58.866398Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:58.866488Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:58.881694Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:58.882023Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:59.136872Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:59.144472Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:59.150901Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:27:59.155499Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.204174Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.256169Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.798020Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7608490251431296884:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:02.798529Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:02.823362Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:02.901078Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:02.902659Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608490251431296833:2081] 1771489682795588 != 1771489682795591 2026-02-19T08:28:02.928594Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:02.928700Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:02.950055Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:02.997786Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-02-19T08:28:03.159569Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:03.164308Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:03.180882Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:03.243774Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:03.292123Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.038349Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490273606713377:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:07.042290Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:07.071449Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:07.192775Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:07.198018Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490273606713330:2081] 1771489687026776 != 1771489687026779 2026-02-19T08:28:07.225072Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:07.225167Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:07.238075Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:07.332703Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:07.511532Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:07.526321Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.593424Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.648958Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::ReadOnlyMode >> TExternalTableTest::ReplaceExternalTableIfNotExists >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2026-02-19T08:27:29.060389Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490109710946196:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:29.061476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:29.311755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:29.311888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:29.336505Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:29.365455Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490109710946138:2081] 1771489649050273 != 1771489649050276 2026-02-19T08:27:29.379040Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:29.385015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:29.572308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:29.588108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:29.594192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:29.669463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:29.816941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:29.864433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:30.067328Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:32.504512Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490124463283382:2161];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:32.514881Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:32.515421Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:32.623434Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:32.627203Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490124463283250:2081] 1771489652490541 != 1771489652490544 2026-02-19T08:27:32.634039Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.646599Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:32.646672Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:32.662587Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:32.840656Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.860424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:32.871633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:32.878883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:32.881908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:32.932785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:32.984245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:36.251051Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490138524798374:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:36.251489Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:36.260988Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:36.348384Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:36.350074Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490138524798315:2081] 1771489656243608 != 1771489656243611 2026-02-19T08:27:36.377546Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:36.377621Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:36.383985Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-02-19T08:27:36.388226Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.559433Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:36.565656Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:36.578847Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:36.630060Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:36.679421Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:39.985991Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490154825618586:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:39.987622Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:40.057935Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:40.075842Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490154825618547:2081] 1771489659979516 != 1771489659979519 2026-02-19T08:27:40.084926Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:40.096425Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) Vol ... 
nknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.574637Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:52.580700Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:52.598676Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:52.673096Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:52.736997Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.938476Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490224782078386:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:56.939182Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:56.986176Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:57.067424Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:57.067530Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:57.079072Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:57.085431Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490224782078340:2081] 1771489676932786 != 1771489676932789 2026-02-19T08:27:57.098822Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:57.179149Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:57.384842Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:57.394221Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:57.410096Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:57.418012Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:57.483602Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:57.533691Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:01.861454Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608490245415858632:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:01.861516Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:01.886339Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:01.979685Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:01.983581Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490245415858601:2081] 1771489681860379 != 1771489681860382 2026-02-19T08:28:02.000089Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:02.000202Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:02.029816Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:02.183534Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:02.312518Z node 9 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:02.334432Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.394493Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.450400Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.066725Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490272773536173:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:07.066779Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:07.101443Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:07.210408Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:07.214078Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490272773536130:2081] 1771489687065493 != 1771489687065496 2026-02-19T08:28:07.226582Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:07.226687Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:07.256209Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:07.295552Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:07.545620Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:07.552768Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:07.571468Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.655100Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.717736Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:27:09.999651Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:10.000638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:10.001341Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:10.103501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:10.105248Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:10.113852Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:10.114199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:10.114437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:10.116100Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:10.116200Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:10.553410Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:10.671185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:10.671353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:10.672139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:10.672245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:10.717312Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:27:10.718149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:10.718579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:10.810116Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:10.836072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:11.667985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:11.668034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:11.668068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:11.668730Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-a8c7-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-f700-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-f700-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-5d3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-5d3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-9f89-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-5cc9-1231c6b1" status: YELLOW message: "Database has storage issues" location { database { name: "/Root" } } reason: "YELLOW-a8c7-1231c6b1" type: "DATABASE" level: 1 } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-02-19T08:27:18.646764Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:18.647741Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:18.648135Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:689:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:18.658249Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:18.659884Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:18.661928Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:694:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:18.662672Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:18.662814Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:18.664224Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:18.664348Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:18.979171Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:19.080373Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:19.080511Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:19.081321Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:19.081401Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:19.141801Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:27:19.142340Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:19.142764Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:19.251803Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:19.264869Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:20.062048Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:20.062276Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:20.062321Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:20.062982Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:27.359050Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:27.359764Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:27.361605Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:695:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:27.371891Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:27.374389Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:27.375980Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:308:2231], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:27.376421Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:27.376555Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/i ... de 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:46.746411Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:46.746493Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:46.746537Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:46.747190Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:27:54.749248Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:54.750344Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:54.766254Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:54.768957Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:54.770320Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:701:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:54.770565Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:54.770873Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:54.772431Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:696:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:54.772701Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:54.772986Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:55.130942Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:55.244012Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:55.244137Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:55.244839Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:55.244929Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:55.292228Z node 11 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-02-19T08:27:55.292983Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:55.293364Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:55.361218Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:55.376645Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:56.232406Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:56.232463Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:56.232494Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:56.232956Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-02-19T08:28:04.854573Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:04.855914Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:04.856632Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:771:2353], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:28:04.870057Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:04.870814Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:04.874673Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:776:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:04.875036Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:04.875255Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:04.877038Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:04.877098Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:05.274082Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:05.417939Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:05.418102Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:05.419141Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:05.419235Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:05.468785Z node 13 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2026-02-19T08:28:05.470024Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:05.470452Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:05.549615Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:05.583991Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:10.161733Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:10.169590Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:10.169647Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:10.169679Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:10.171020Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:10.188126Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:10.188251Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:10.225095Z node 13 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-02-19T08:28:10.225918Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-13" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 13 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-14" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 14 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-15" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 15 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-13" reason: "YELLOW-7932-1231c6b1-14" reason: "YELLOW-7932-1231c6b1-15" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 13 host: "::1" port: 12001 } >> TExternalTableTest::DropTableTwice >> TExternalTableTest::SchemeErrors >> TObjectStorageListingTest::ManyDeletes [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TLocksTest::GoodSameShardLock [GOOD] >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> TExternalTableTest::ParallelCreateSameExternalTable >> TLocksTest::SetLockNothing [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> 
TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:13.486639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:13.486771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.486837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:13.486881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:13.486920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:13.486966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:13.486998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:13.487036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:13.487159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.487232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:13.488085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.488467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:13.601881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:13.601954Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:13.602532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.612733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:13.612832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 
2026-02-19T08:28:13.612974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:13.623837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:13.624532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:13.625268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:13.625551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:13.629018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.629160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:13.630648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:13.630697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.630900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:13.630958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:13.631033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:13.631337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:13.769914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.770882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.771886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:28:14.312015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:28:14.312051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:28:14.312087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:28:14.312116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-02-19T08:28:14.312993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:14.313139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:14.313179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:28:14.313217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:28:14.313273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:28:14.314607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:14.314683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:14.314740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:28:14.314784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:28:14.314817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:28:14.314879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:28:14.316730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:28:14.318018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:28:14.318225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:28:14.318277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:28:14.318677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:28:14.318755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.318791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:708:2696] TestWaitNotification: OK eventTxId 101 2026-02-19T08:28:14.319178Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:14.319379Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 218us result status StatusSuccess 2026-02-19T08:28:14.319755Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-02-19T08:28:14.322381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:14.322692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2026-02-19T08:28:14.322773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-02-19T08:28:14.322822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-02-19T08:28:14.327635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature 
flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.327836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:28:14.328126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:28:14.328163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:28:14.328544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:28:14.328631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.328665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:716:2704] TestWaitNotification: OK eventTxId 102 2026-02-19T08:28:14.329146Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:14.329319Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 190us result status StatusPathDoesNotExist 2026-02-19T08:28:14.329484Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalTableTest::ReadOnlyMode [GOOD] >> TExternalTableTest::DropTableTwice [GOOD] >> TExternalTableTest::ParallelCreateExternalTable >> TExternalTableTest::SchemeErrors [GOOD] >> KqpSystemView::PartitionStatsRange3 >> TLocksTest::Range_EmptyKey [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] 
Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:13.586620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:13.586731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.586812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:13.586846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:13.586883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:13.586925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:13.586955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:13.586985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:13.587057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.587129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:13.587992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.588304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:13.714344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:13.714421Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:13.715187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.726025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:13.726128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:13.726300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:13.736862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2026-02-19T08:28:13.737498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:13.738276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:13.738608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:13.742758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.742915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:13.744531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:13.744591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.744774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:13.744835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:13.744908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:13.745209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:13.891089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:28:13.892867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.892944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.893060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.586826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_table.cpp:58: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-02-19T08:28:14.586961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:28:14.587186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:28:14.587313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:28:14.588307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:28:14.589733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:28:14.590944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:14.590984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:14.591123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:28:14.591231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:28:14.591329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.591387Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2212], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-02-19T08:28:14.591435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-02-19T08:28:14.591459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-02-19T08:28:14.591530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:28:14.591584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:28:14.591680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:28:14.591716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:28:14.591758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:28:14.591789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:28:14.591835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-02-19T08:28:14.591874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:28:14.591927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:28:14.591962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:28:14.592031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:28:14.592072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:28:14.592117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-02-19T08:28:14.592150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:28:14.592180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-02-19T08:28:14.593486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:28:14.593569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:28:14.593607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:28:14.593648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:28:14.593692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:28:14.594996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:28:14.595108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:28:14.595146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:28:14.595177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:28:14.595207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:28:14.595280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-02-19T08:28:14.597774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:28:14.598202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:28:14.598461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:28:14.598516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:28:14.598955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:28:14.599062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.599101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:800:2788] TestWaitNotification: OK eventTxId 104 2026-02-19T08:28:14.599654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:14.599879Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 227us result status StatusSuccess 2026-02-19T08:28:14.600282Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2026-02-19T08:27:52.481803Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490209792078187:2197];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:52.481869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:52.512220Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:52.915559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:27:52.915668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:52.937380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:52.944446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.025864Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490209792078026:2081] 1771489672468515 != 1771489672468518 2026-02-19T08:27:53.027676Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:53.175465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.176347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:53.176366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:53.176382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:53.177032Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:53.401146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:53.420043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:53.473435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.489913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:57.481528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490209792078187:2197];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:57.481794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:27:58.298358Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490235545592498:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:58.298416Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:58.344844Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:58.412938Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:58.429410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:58.429497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:58.452855Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:58.507144Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:58.507174Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:58.507183Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:58.507272Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:58.588598Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:58.713079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:58.717312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:58.737442Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) .2026-02-19T08:27:59.262211Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; .2026-02-19T08:28:03.294699Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490235545592498:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:03.294761Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:06.895533Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-02-19T08:28:06.895994Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2026-02-19T08:28:06.896620Z 
node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037889 2026-02-19T08:28:06.896759Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037890 2026-02-19T08:28:06.896940Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037891 2026-02-19T08:28:06.897034Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2026-02-19T08:28:06.897632Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037891 2026-02-19T08:28:06.897725Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037892 2026-02-19T08:28:06.898189Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-02-19T08:28:06.898272Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-02-19T08:28:06.898801Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037891 2026-02-19T08:28:06.898865Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2026-02-19T08:28:06.907114Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976716500 at step 1771489686946 at tablet 72075186224037889 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771489686946 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-02-19T08:28:06.907130Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976716500 at step 1771489686946 at tablet 72075186224037891 { Transactions { TxId: 281474976716500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771489686946 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2026-02-19T08:28:06.907157Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:28:06.907162Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-02-19T08:28:06.907301Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:28:06.907319Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-02-19T08:28:06.907328Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:06.907337Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:06.907361Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1771489686946:281474976716500] in PlanQueue unit at 72075186224037889 2026-02-19T08:28:06.907362Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1771489686946:281474976716500] in PlanQueue unit at 72075186224037891 2026-02-19T08:28:06.907406Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037889 got 
data tx from cache 1771489686946:281474976716500 2026-02-19T08:28:06.907407Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037891 got data tx from cache 1771489686946:281474976716500 2026-02-19T08:28:06.908498Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:06.908889Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 2 ... 026-02-19T08:28:13.129124Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976716911 at step 1771489693169 at tablet 72075186224037892 { Transactions { TxId: 281474976716911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1771489693169 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2026-02-19T08:28:13.129151Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-02-19T08:28:13.129231Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-02-19T08:28:13.129250Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:13.129265Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1771489693169:281474976716911] in PlanQueue unit at 72075186224037892 2026-02-19T08:28:13.129288Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037892 got data tx from cache 1771489693169:281474976716911 2026-02-19T08:28:13.130390Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976716911 released its data 2026-02-19T08:28:13.130423Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:13.130568Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1771489693169} 2026-02-19T08:28:13.130611Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-02-19T08:28:13.130656Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1771489693169 : 281474976716911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7608490299970115257:10480], exec latency: 0 ms, propose latency: 4 ms 2026-02-19T08:28:13.130682Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-02-19T08:28:13.130927Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1771489693169} 2026-02-19T08:28:13.131020Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-02-19T08:28:13.131187Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976716911 released its data 2026-02-19T08:28:13.131227Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:13.131821Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976716911 at 72075186224037890 restored its data 2026-02-19T08:28:13.132183Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending 
'{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1771489693169} 2026-02-19T08:28:13.132294Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1771489693169} 2026-02-19T08:28:13.132354Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-02-19T08:28:13.132842Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976716911 released its data 2026-02-19T08:28:13.132873Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:13.132960Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976716911 at 72075186224037892 restored its data 2026-02-19T08:28:13.133574Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-02-19T08:28:13.133586Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976716911 released its data 2026-02-19T08:28:13.133601Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:13.134043Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:28:13.134398Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976716911 at 72075186224037890 restored its data 2026-02-19T08:28:13.134671Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976716911 at 72075186224037889 restored its data 2026-02-19T08:28:13.135248Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:13.135443Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-02-19T08:28:13.136114Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976716911 at 72075186224037892 restored its data 2026-02-19T08:28:13.136845Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:13.137228Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:28:13.137278Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1771489693169 : 281474976716911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7608490299970115257:10480], exec latency: 7 ms, propose latency: 9 ms 2026-02-19T08:28:13.137306Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:28:13.137990Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:612: tx 281474976716911 released its data 2026-02-19T08:28:13.138016Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:13.141899Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-02-19T08:28:13.141955Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1771489693169 : 281474976716911] from 72075186224037892 at tablet 
72075186224037892 send result to client [2:7608490299970115257:10480], exec latency: 7 ms, propose latency: 12 ms 2026-02-19T08:28:13.141984Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-02-19T08:28:13.144063Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:28:13.144877Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:712: tx 281474976716911 at 72075186224037889 restored its data 2026-02-19T08:28:13.149880Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:13.151990Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-02-19T08:28:13.152046Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1771489693169 : 281474976716911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7608490299970115257:10480], exec latency: 23 ms, propose latency: 26 ms 2026-02-19T08:28:13.152075Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:28:13.167690Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-02-19T08:28:13.167990Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2026-02-19T08:28:13.168334Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-02-19T08:28:13.168419Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2026-02-19T08:28:13.168651Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-02-19T08:28:13.170946Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2026-02-19T08:28:13.171371Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, 
value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2026-02-19T08:28:13.171781Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2026-02-19T08:28:13.172204Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2026-02-19T08:28:13.172675Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2026-02-19T08:28:13.172819Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 >> TExternalTableTest::Decimal [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk >> TLocksTest::BrokenDupLock [GOOD] >> KqpSystemView::Sessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:13.586905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:13.586983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.587063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:13.587115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:13.587164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:13.587207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:13.587237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:13.587270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:13.587354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.587437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:13.588265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.588530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:13.682012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:13.682084Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:13.682762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.699283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:13.699394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:13.699534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:13.711457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:13.712250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:13.713245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:13.713996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:13.721693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.721968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:13.723877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:13.723953Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.724185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:13.724250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:13.724333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:13.724649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:13.886747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.887774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.887901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.887992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.888916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... meshard: 72057594046678944 2026-02-19T08:28:14.936254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2026-02-19T08:28:14.936374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:28:14.937147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:28:14.937253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:28:14.937311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:28:14.937358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-02-19T08:28:14.937419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-02-19T08:28:14.938198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:28:14.938277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:28:14.938310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:28:14.938341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-02-19T08:28:14.938370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:28:14.938445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2026-02-19T08:28:14.940572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2026-02-19T08:28:14.940768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 129, 
partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2026-02-19T08:28:14.942620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:14.942745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.942802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:35: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2026-02-19T08:28:14.942946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 129:0 Propose -> Done 2026-02-19T08:28:14.943102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-02-19T08:28:14.943163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-02-19T08:28:14.943431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-02-19T08:28:14.943554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2026-02-19T08:28:14.945189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:14.945230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:14.945393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-02-19T08:28:14.945467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.945505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:895:2849], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-02-19T08:28:14.945546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:895:2849], at schemeshard: 72057594046678944, txId: 129, path id: 41 2026-02-19T08:28:14.945774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2026-02-19T08:28:14.945819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] 
TDone opId# 129:0 ProgressState 2026-02-19T08:28:14.945908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-02-19T08:28:14.945946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:28:14.945989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-02-19T08:28:14.946045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:28:14.946085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-02-19T08:28:14.946131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-02-19T08:28:14.946168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-02-19T08:28:14.946205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 129:0 2026-02-19T08:28:14.946274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:28:14.946312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-02-19T08:28:14.946346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-02-19T08:28:14.946379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2026-02-19T08:28:14.946969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:28:14.947060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:28:14.947101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:28:14.947140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-02-19T08:28:14.947192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-02-19T08:28:14.948112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:28:14.948193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-02-19T08:28:14.948229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-02-19T08:28:14.948257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2026-02-19T08:28:14.948284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-02-19T08:28:14.948351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-02-19T08:28:14.951184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-02-19T08:28:14.952436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:13.999129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:13.999219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.999287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:13.999347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:13.999388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:13.999436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:13.999466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:13.999498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 
100, BatchMaxTime 0.100000s 2026-02-19T08:28:13.999584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.999686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:14.000587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:14.000942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:14.119492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:14.119570Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:14.120329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:14.135706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:14.135818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:14.135999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:14.146831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:14.147407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:14.148107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:14.148397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:14.153825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.154033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:14.156672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:14.156743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.157027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:14.157090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:14.157135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-02-19T08:28:14.157465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:14.309645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.310613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.310739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.310840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.310904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.310967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
026-02-19T08:28:14.932599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2026-02-19T08:28:14.932927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2026-02-19T08:28:14.935205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.935447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-02-19T08:28:14.937471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:14.937741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2026-02-19T08:28:14.937809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2026-02-19T08:28:14.937905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2026-02-19T08:28:14.939681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.939869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2026-02-19T08:28:14.941875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" 
DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:14.942213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2026-02-19T08:28:14.942307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2026-02-19T08:28:14.942414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2026-02-19T08:28:14.944331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.944560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2026-02-19T08:28:14.947037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:14.947371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2026-02-19T08:28:14.947515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2026-02-19T08:28:14.947674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2026-02-19T08:28:14.950022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.950320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 
129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2026-02-19T08:28:14.952930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:14.953323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2026-02-19T08:28:14.953422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2026-02-19T08:28:14.953641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2026-02-19T08:28:14.955759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.956018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2026-02-19T08:28:14.958680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:14.959029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2026-02-19T08:28:14.959122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] 
TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2026-02-19T08:28:14.959277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-02-19T08:28:14.961467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:14.961768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:13.893281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:13.893366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.893421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:13.893450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:13.893483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:13.893544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:13.893565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:13.893588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:13.893653Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.893740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:13.894552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.894853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:13.995665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:13.995730Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:13.996483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:14.006021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:14.006118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:14.006285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:14.017507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:14.018212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:14.019029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:14.019359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:14.024115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.024323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:14.026150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:14.026217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.026469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:14.026520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:14.026569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:14.026892Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:14.180308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.181368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.181499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.181575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.181646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.181732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.181795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.181892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.181973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.182079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.182178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.182246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.182419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.182495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
T_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.951829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.951951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-02-19T08:28:14.952053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.952075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.952132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-02-19T08:28:14.952275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.952311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.952363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-02-19T08:28:14.952486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-02-19T08:28:14.952565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.952588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.952662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-02-19T08:28:14.952817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.952844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.952891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-02-19T08:28:14.953000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.953024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.953110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.953134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.953174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-02-19T08:28:14.953281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 128, at schemeshard: 72057594046678944 2026-02-19T08:28:14.953382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.953409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.953458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-02-19T08:28:14.953560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.953585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.953642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-02-19T08:28:14.953769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.953797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.953889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.953912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.954021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.954046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.954103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-02-19T08:28:14.954174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2026-02-19T08:28:14.954268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.954293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.954433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.954462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.954585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.954611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.954689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.954711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.954847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.954872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.955051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.955077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:828:2816] 2026-02-19T08:28:14.955188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2026-02-19T08:28:14.955214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:828:2816] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 2026-02-19T08:28:14.959027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:14.959290Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 260us result status StatusSuccess 2026-02-19T08:28:14.959674Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TExternalTableTest::ParallelCreateExternalTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2026-02-19T08:27:32.076989Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490122660409300:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:32.077155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:32.269788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.288017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:32.288126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:32.353673Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:32.356327Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490122660409256:2081] 1771489652074406 != 1771489652074409 2026-02-19T08:27:32.365086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:32.471191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:32.585143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:32.689731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:32.837019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:32.892393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:33.085908Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:35.760832Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490135130372972:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:35.761623Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:35.805797Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:35.868191Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:35.869786Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490135130372940:2081] 1771489655758293 != 1771489655758296 2026-02-19T08:27:35.939676Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:35.939757Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:35.943261Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:36.009771Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:36.125184Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:27:36.144710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:36.200453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:36.251924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:39.451360Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490153311869693:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:39.452009Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:39.469996Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:39.540048Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:39.541051Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490153311869647:2081] 1771489659449745 != 1771489659449748 2026-02-19T08:27:39.580190Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:39.580272Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:39.585303Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:39.749801Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:39.762352Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:39.786846Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:39.849409Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:39.900566Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:43.193951Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490171213005448:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:43.194007Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:43.216262Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:43.302861Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490171213005406:2081] 1771489663192907 != 1771489663192910 2026-02-19T08:27:43.326654Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:43.341807Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:43.341907Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:43.344689Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:43.394127Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:43.556923Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:43.564242Z node 4 :FLAT_TX_SCHEMESHARD WARN: ... 
1461Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:56.094089Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:56.098691Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:56.118938Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.195223Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.298360Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:00.064517Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490244876500419:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:00.064889Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:00.095565Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:00.178781Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:00.180931Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490244876500358:2081] 1771489680060440 != 1771489680060443 2026-02-19T08:28:00.216097Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:00.216187Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:00.299699Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:00.303444Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:00.507875Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:00.513382Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:00.530177Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:00.593854Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:00.661370Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:04.913197Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608490259479988598:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:04.914071Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:04.941060Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:05.027277Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:05.031080Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490259479988553:2081] 1771489684901591 != 1771489684901594 2026-02-19T08:28:05.045169Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:05.045274Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:05.076343Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:05.220362Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:05.338934Z node 9 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:05.362041Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:28:05.368168Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:05.443937Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:05.585854Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:09.778046Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490280033053989:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:09.778142Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:09.821627Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:09.918953Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:09.921285Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490280033053944:2081] 1771489689776719 != 1771489689776722 2026-02-19T08:28:09.937169Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:09.937279Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:09.969044Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:10.021800Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:10.261335Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:10.286209Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.357205Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.413524Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:13.268636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:13.269239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.269316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:13.269373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:13.269416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:13.269465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:13.269493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:13.269520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:13.269637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.269725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:13.270937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.272251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:13.407420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:13.407489Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:13.408134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.421529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:13.421626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:13.423648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:13.436339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:13.437779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:13.438885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:13.441114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:13.446274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.446908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:13.452946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:13.453024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.453843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:13.453900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:13.454569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:13.455405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:13.615021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.616996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.617063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
rd__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:28:15.283048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:28:15.284256Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:28:15.284321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:28:15.284346Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:28:15.284374Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:28:15.284411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:28:15.284459Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:28:15.286046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:28:15.286616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:28:15.287239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:28:15.287398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:28:15.287429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:28:15.287677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:28:15.287766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:28:15.287798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:743:2732] TestWaitNotification: OK eventTxId 102 2026-02-19T08:28:15.288089Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: 
false }, at schemeshard: 72057594046678944 2026-02-19T08:28:15.288246Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 178us result status StatusSuccess 2026-02-19T08:28:15.288474Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-02-19T08:28:15.290979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:15.291220Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2026-02-19T08:28:15.291280Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: 
opId# 103:0, path# /MyRoot/ExternalTable 2026-02-19T08:28:15.291369Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:28:15.292905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-02-19T08:28:15.293053Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:28:15.293242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:28:15.293269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:28:15.293563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:28:15.293626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:28:15.293661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:751:2740] TestWaitNotification: OK eventTxId 103 2026-02-19T08:28:15.294162Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:15.294296Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 160us result status StatusSuccess 2026-02-19T08:28:15.294501Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:14.438563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:14.438678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:14.438744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:14.438787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:14.438831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:14.438874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:14.438906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:14.438947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:14.439034Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:14.439110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:14.439950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:14.440267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:14.573876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:14.573952Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:14.574864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:14.586194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:14.586303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:14.586480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:14.597106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:14.597725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:14.598471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:14.598787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:14.603351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.603551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:14.605264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:14.605325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.605532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:14.605579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:14.605636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:14.605974Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:14.763188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.764941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.765007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.765089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.765155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.765278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
tate 2026-02-19T08:28:15.406719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:28:15.406757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:28:15.406798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:28:15.406832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:28:15.406881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:28:15.406985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:28:15.407030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:28:15.407078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:28:15.407155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:28:15.407198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:28:15.407230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-02-19T08:28:15.407283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-02-19T08:28:15.408612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:28:15.408696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:28:15.408732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:28:15.408802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-02-19T08:28:15.408855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:28:15.410254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:28:15.410341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:28:15.410373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:28:15.410413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:28:15.410451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:28:15.410521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:28:15.413768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:28:15.414267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:28:15.414483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:28:15.414541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:28:15.414919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:28:15.415042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:28:15.415157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:729:2717] TestWaitNotification: OK eventTxId 102 2026-02-19T08:28:15.415692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:15.415936Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 235us result status StatusSuccess 2026-02-19T08:28:15.416291Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-02-19T08:28:15.418999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:15.419325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2026-02-19T08:28:15.419450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_table.cpp:304: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2026-02-19T08:28:15.419604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2026-02-19T08:28:15.422030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-02-19T08:28:15.422283Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:28:15.422626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:28:15.422691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:28:15.423146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:28:15.423241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:28:15.423278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:737:2725] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2026-02-19T08:27:47.842340Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490185939191768:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:47.846002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:48.052578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:48.088926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:48.089017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:48.148722Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:48.152763Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490185939191739:2081] 1771489667836206 != 1771489667836209 2026-02-19T08:27:48.170750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:48.220668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:48.390068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:48.396238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:48.461497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:48.596049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:48.643562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:48.850297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:51.305648Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490204578181180:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:51.307599Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:51.319454Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:51.386819Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:51.390216Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490204578181148:2081] 1771489671303286 != 1771489671303289 2026-02-19T08:27:51.398934Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:51.399023Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:51.426101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:51.606287Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:51.637762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:51.657051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:51.709456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:51.760810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:54.802928Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490217459328595:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:54.803864Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:54.811018Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:54.916588Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:54.929919Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490217459328562:2081] 1771489674799142 != 1771489674799145 2026-02-19T08:27:54.945964Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:54.947746Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:54.947822Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:54.969541Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:55.126887Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:55.204515Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:27:55.209767Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:55.228922Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:55.282511Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:55.324333Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:58.192002Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490233468125033:2133];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:58.192041Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:58.206987Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:58.316747Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:58.322085Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490233468124940:2081] 1771489678188696 != 1771489678188699 2026-02-19T08:27:58.328304Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:58.335411Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:58.335490Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026 ... 
-02-19T08:27:58.517608Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:58.599263Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:58.603872Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:58.615256Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:58.671478Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:58.718953Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.070369Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608490253172416848:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:02.070956Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:02.153260Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:02.182059Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:02.184255Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608490253172416778:2081] 1771489682061875 != 1771489682061878 2026-02-19T08:28:02.197073Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:02.197164Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:02.224804Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:02.413814Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:02.414111Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:02.420051Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:02.438626Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.562044Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.611504Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:06.294663Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7608490266900037172:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:06.295699Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:06.341816Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:06.396823Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:06.398583Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608490266900037116:2081] 1771489686290633 != 1771489686290636 2026-02-19T08:28:06.429181Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:06.429267Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:06.467333Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:06.621818Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:06.683467Z node 6 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:06.689415Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:06.704425Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:06.767502Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:06.820126Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.572314Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490286160368462:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:10.572388Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:10.595920Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:10.685090Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:10.688317Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490286160368432:2081] 1771489690571377 != 1771489690571380 2026-02-19T08:28:10.701956Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:10.702059Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:10.762979Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:10.793415Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:10.994637Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:11.012862Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.070585Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.111068Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:13.279099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:13.279199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.279266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:13.279302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:13.279339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:13.279404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:13.279436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:13.279461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:13.279543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.279611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:13.280442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.280756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:13.417530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:13.417634Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:13.418498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.430586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:13.430701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:13.430929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:13.442680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:13.443407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:13.444101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:13.444457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:13.450631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.450862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:13.452872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:13.452938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:13.453813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:13.453875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:13.454550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:13.455403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-02-19T08:28:13.631647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.632662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.632774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.632885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.632974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:13.633806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
8944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:28:15.319120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:28:15.319221Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:15.319250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:28:15.319286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 39 2026-02-19T08:28:15.319314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 39 2026-02-19T08:28:15.319337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:28:15.319553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:28:15.319593Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:28:15.319685Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:28:15.319721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:28:15.319758Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:28:15.319935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:28:15.319972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:28:15.320009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:28:15.320048Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:28:15.320079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:28:15.320167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:28:15.320207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:28:15.320239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 3, subscribers: 0 
2026-02-19T08:28:15.320270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:28:15.320302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-02-19T08:28:15.320325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-02-19T08:28:15.321244Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:15.321319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:15.321353Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:28:15.321398Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:28:15.321440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:28:15.322253Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:15.322325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:15.322352Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:28:15.322380Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:28:15.322414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:28:15.323464Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:15.323531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:28:15.323555Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:28:15.323581Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:28:15.323608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:28:15.323668Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:28:15.325541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:28:15.326094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:28:15.326900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:28:15.327123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:28:15.327164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:28:15.327487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:28:15.327573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:28:15.327608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:743:2732] TestWaitNotification: OK eventTxId 101 2026-02-19T08:28:15.328030Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:15.328210Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 206us result status StatusSuccess 2026-02-19T08:28:15.328551Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD]
>> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD]
Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:44.712998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:44.828738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:44.835257Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:44.835597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:44.835762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:45.229148Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:45.330882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:45.331004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:45.390130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:45.483480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:46.144688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:46.145391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:46.145441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:46.145486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:46.146206Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:46.210416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:46.753390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:49.329628Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:49.335595Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:49.338651Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:49.367245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:49.367377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:49.416778Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:49.418743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:49.599151Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:49.599272Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:49.600927Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.601817Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.602622Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.603447Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.603813Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.603934Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.604184Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.604353Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.604554Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:49.620371Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:49.825540Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:49.876551Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:49.876675Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:49.915083Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:49.915259Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:49.915411Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:49.915453Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:49.915500Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:49.915554Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:49.915593Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:49.915639Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:49.916034Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:49.920027Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:49.920119Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:49.944511Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:49.945728Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:50.014827Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:50.018517Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:50.030385Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:50.030443Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:50.030509Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:50.035290Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:50.038770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:50.044968Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:50.045084Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:50.057425Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:50.245117Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:50.306858Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:50.571991Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:50.672035Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:50.672125Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:51.290434Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 19T08:26:18.008789Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4254:3786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:18.009168Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:26:18.009285Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:4256:3788] 2026-02-19T08:26:18.009373Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:4256:3788] 2026-02-19T08:26:18.009740Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4257:3789] 2026-02-19T08:26:18.009812Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4256:3788], server id = [2:4257:3789], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:18.009877Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4257:3789], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:18.009943Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:18.010075Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:18.010132Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4254:3786], StatRequests.size() = 1 2026-02-19T08:26:18.752714Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:27:17.814070Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4119:3711], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:27:17.814319Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4119:3711], Start read next stream part 2026-02-19T08:27:17.814521Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg3jdkb91vk5ew6r40j98j", SessionId: ydb://session/3?node_id=2&id=NjMwYzMwMTUtNmNmMjcxMzEtYjE5MjZkMjEtMWQ5ZmMxMGM=, Slow query, 
duration: 60.412904s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:27:17.815949Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4366:3888], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:27:17.818618Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-02-19T08:27:17.819286Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4366:3888], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:27:17.819396Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4366:3888], Start read next stream part 2026-02-19T08:27:17.819712Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4119:3711], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:27:17.819768Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4119:3711], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2JhYWM0YjctODM0OGYyMTgtNTFkYjkzNWQtMjMxNmVmYTU=, TxId: 2026-02-19T08:27:17.919676Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4393:3911]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:27:17.919916Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:27:17.919964Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:4393:3911], StatRequests.size() = 1 2026-02-19T08:28:07.973459Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4366:3888], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:28:07.973619Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4366:3888], Start read next stream part 2026-02-19T08:28:07.973859Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg5ddv51msaabcavmcqnj1", SessionId: ydb://session/3?node_id=2&id=MjI0MGRmY2UtYzRlNmRjODYtMjVhYjViM2UtZmFhNDdiOTc=, Slow query, duration: 50.150089s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 
2026-02-19T08:28:07.976764Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32190, txId: 18446744073709551615] shutting down 2026-02-19T08:28:07.977070Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4481:3994], ActorId: [2:4482:3995], Starting query actor #1 [2:4483:3996] 2026-02-19T08:28:07.977154Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4482:3995], ActorId: [2:4483:3996], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:28:07.981512Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4482:3995], ActorId: [2:4483:3996], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OWUwMjc1NGEtODA0YTYyYzAtZDM4N2NiNzUtYTJiOGYyNjg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:28:07.982378Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4366:3888], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:28:07.982438Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4109:3707], ActorId: [2:4366:3888], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjRiNDUyYWQtYWU1N2IxZjYtNzg5NTE0YTktZWJjYThhYjI=, TxId: 2026-02-19T08:28:08.061496Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4500:4010]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:08.061865Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:28:08.061918Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:4500:4010], StatRequests.size() = 1 2026-02-19T08:28:08.223411Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4482:3995], ActorId: [2:4483:3996], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWUwMjc1NGEtODA0YTYyYzAtZDM4N2NiNzUtYTJiOGYyNjg=, TxId: 2026-02-19T08:28:08.223522Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4482:3995], ActorId: [2:4483:3996], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWUwMjc1NGEtODA0YTYyYzAtZDM4N2NiNzUtYTJiOGYyNjg=, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... 
waiting for TEvSaveStatisticsQueryResponse (done) 2026-02-19T08:28:08.319463Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:28:08.319565Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:28:08.345585Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:4256:3788], schemeshard count = 1 2026-02-19T08:28:08.617523Z node 2 :STATISTICS ERROR: tx_analyze_deadline.cpp:26: [72075186224037894] Delete long analyze operation, OperationId="operationId" 2026-02-19T08:28:09.006713Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-02-19T08:28:09.007007Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 1 2026-02-19T08:28:09.007145Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-02-19T08:28:09.018197Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8677: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-02-19T08:28:09.018362Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-02-19T08:28:09.018689Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-02-19T08:28:09.907851Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-02-19T08:28:09.907942Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8557: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-02-19T08:28:09.907996Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8588: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-02-19T08:28:09.908052Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-02-19T08:28:13.720371Z node 2 :STATISTICS DEBUG: tx_analyze_deadline.cpp:44: [72075186224037894] TTxAnalyzeDeadline::Complete. 
Send TEvAnalyzeResponse for deleted operation, OperationId="operationId", ActorId=[1:3180:3445] 2026-02-19T08:28:13.720536Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete
>> KqpKnn::FloatVectorKnnPushdown [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:14.945352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:14.945428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:14.945522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:14.945570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:14.945609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:14.945651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:14.945680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:14.945727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:14.945821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:14.945901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:14.946738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:14.947030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:15.035004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:15.035075Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:15.035823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:15.046682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:15.046799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:15.047013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:15.059950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:15.066238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:15.066873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:15.067278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:15.072807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:15.073062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:15.075292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:15.075368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:15.075629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:15.075689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:15.075740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:15.076079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:15.223989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.225994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.226083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:15.226232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:15.847153Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:15.847387Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 210us result status StatusSuccess 2026-02-19T08:28:15.847699Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 
MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2026-02-19T08:28:15.848029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2026-02-19T08:28:15.848099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2026-02-19T08:28:15.848197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2026-02-19T08:28:15.848221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2026-02-19T08:28:15.848265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2026-02-19T08:28:15.848284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2026-02-19T08:28:15.848736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-02-19T08:28:15.848868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-02-19T08:28:15.848915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:746:2734] 2026-02-19T08:28:15.849281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-02-19T08:28:15.849368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-02-19T08:28:15.849394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:746:2734] 2026-02-19T08:28:15.849469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-02-19T08:28:15.849542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-02-19T08:28:15.849565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:746:2734] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-02-19T08:28:15.850150Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:15.850347Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: 
Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 202us result status StatusSuccess 2026-02-19T08:28:15.850665Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2026-02-19T08:28:15.853302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:28:15.853644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2026-02-19T08:28:15.853777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2026-02-19T08:28:15.853914Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:28:15.856402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 125, at schemeshard: 72057594046678944 2026-02-19T08:28:15.856621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2026-02-19T08:28:13.988086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:28:13.988170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.988260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:28:13.988318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:28:13.988362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:28:13.988411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:28:13.988444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:28:13.988474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:28:13.988568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:28:13.988655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:28:13.989574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:13.989908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:28:14.118521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:28:14.118597Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:14.119536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:28:14.134278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:28:14.134401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:28:14.134589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:28:14.145594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:28:14.146234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:28:14.147026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:28:14.147342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:28:14.151849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.152090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:28:14.153900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:28:14.153967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:28:14.154193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:28:14.154244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:28:14.154288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:28:14.154605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:28:14.310175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.311932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.312005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.312089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.312180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:28:14.312307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transacti ... 
hildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:16.144664Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:16.144839Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 183us result status StatusSuccess 2026-02-19T08:28:16.145086Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" 
TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:16.145657Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:16.145785Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 138us result status StatusSuccess 2026-02-19T08:28:16.146092Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:16.146629Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:16.146744Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 131us result status StatusSuccess 2026-02-19T08:28:16.146934Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:28:16.147332Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:28:16.147458Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 141us result status StatusSuccess 2026-02-19T08:28:16.147718Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 
1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2026-02-19T08:27:35.012302Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490137390379580:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:35.014736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:35.044457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:35.316396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:35.316527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:35.321787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:35.364477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:35.365548Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490133095412235:2081] 1771489655008960 != 1771489655008963 2026-02-19T08:27:35.365581Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:35.524768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:35.566267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:35.572130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:35.582168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:35.663012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:35.799663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:35.853752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:36.018845Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:38.487825Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:38.489991Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:38.583014Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:38.583114Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:38.585172Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490147337395737:2081] 1771489658416958 != 1771489658416961 2026-02-19T08:27:38.597025Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:38.607649Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:38.742668Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:38.805601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:38.814610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:38.825129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:38.885396Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:38.934044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:42.043876Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490167517365083:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:42.043955Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:42.060955Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:42.131552Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:42.171585Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:42.171675Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:42.174567Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:42.243939Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:42.380023Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:42.392949Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:27:42.398969Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:42.445150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:42.491351Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.789899Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490177906043824:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:45.790777Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:45.803418Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:45.880467Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:45.914543Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:45.914629Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:45.917943Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:46.027588Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:46.125656Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__op ... 
97 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:57.870811Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:57.875310Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:58.119389Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:58.129926Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:58.147267Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:58.225627Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:58.290174Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.085746Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490249621234284:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:02.088067Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:02.110115Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:02.234828Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490249621234245:2081] 1771489682078337 != 1771489682078340 2026-02-19T08:28:02.247179Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:02.248801Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:02.248893Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:02.253154Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-02-19T08:28:02.342788Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:02.581740Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:02.609112Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.674757Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.739519Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:06.774091Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608490267164264066:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:06.774129Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:06.788172Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:06.884771Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490267164264012:2081] 1771489686772923 != 1771489686772926 2026-02-19T08:28:06.903969Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:06.918560Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:06.918661Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:06.921484Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:07.083170Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:07.187152Z node 9 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:07.202271Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:07.209516Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.299910Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.366125Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.335633Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490289486096015:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:11.336073Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:11.354432Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:11.452084Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:11.452175Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:11.452615Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:11.454393Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490289486095969:2081] 1771489691333935 != 1771489691333938 2026-02-19T08:28:11.487470Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:11.537366Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:11.712661Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:11.734867Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.795192Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.853282Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [GOOD] >> KqpSystemView::NodesOrderByDesc >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] >> KqpSystemView::QueryStatsSimple >> KqpSysColV1::InnerJoinTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2026-02-19T08:27:33.651083Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490124931164991:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:33.651190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:33.679778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:34.011650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:34.011767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:34.078962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:34.098172Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490124931164765:2081] 1771489653606866 != 1771489653606869 2026-02-19T08:27:34.101096Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:34.116337Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:34.263938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:34.359399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:34.365578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:34.439732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:34.583092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:34.640939Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:34.647245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:37.370733Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490145879731689:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:37.370771Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:37.409720Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:37.489350Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:37.490562Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490145879731646:2081] 1771489657369441 != 1771489657369444 2026-02-19T08:27:37.506462Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:37.506536Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-02-19T08:27:37.560206Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:37.604673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:37.716939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:37.722240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:37.735179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:37.786846Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:37.842386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:41.065918Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490161638256741:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:41.065964Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:41.089328Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:41.161648Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:41.164441Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490161638256711:2081] 1771489661064789 != 1771489661064792 2026-02-19T08:27:41.199385Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:41.199465Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:41.203988Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-02-19T08:27:41.261762Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:41.377185Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:41.382875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:41.395749Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:41.448690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:41.498480Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:44.778683Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490172624521118:2174];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:44.778950Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:44.787948Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:44.876447Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:44.879411Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490172624520959:2081] 1771489664760727 != 1771489664760730 2026-02-19T08:27:44.908135Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:44.908230Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:44.918841Z node 4 :HIVE WARN: n ... 
k, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:57.288474Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:57.308140Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:57.369249Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:57.468237Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:01.469321Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490248678394300:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:01.469362Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:01.507147Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:01.586740Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:01.597108Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490248678394269:2081] 1771489681467999 != 1771489681468002 2026-02-19T08:28:01.619474Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:01.619578Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:01.632482Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:01.807636Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:01.880932Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:01.887311Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:01.895461Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:01.900980Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:01.960241Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:02.018599Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:06.317954Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608490269448415670:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:06.318022Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:06.337960Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:06.411921Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:06.413880Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490269448415631:2081] 1771489686316385 != 1771489686316388 2026-02-19T08:28:06.442244Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:06.442318Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:06.453033Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:06.518376Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:06.838009Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:06.862079Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:06.874537Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.002414Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.065581Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:07.332211Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:11.182542Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490290659822469:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:11.182633Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:11.202594Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:11.287052Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:11.289288Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490290659822432:2081] 1771489691181428 != 1771489691181431 2026-02-19T08:28:11.329895Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:11.330004Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:11.333928Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:11.413543Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:11.598596Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:11.624997Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.696286Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.803648Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView |94.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::FloatVectorKnnPushdown [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:06.827106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:06.950856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:06.970845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:06.970929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:06.971495Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:07.276120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:07.276258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:07.362701Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489684005631 != 1771489684005635 2026-02-19T08:28:07.376232Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:07.424629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:07.555426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:07.934305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:07.934376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:07.934466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:07.935009Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:07.947121Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:08.279919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:08.377568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:08.633926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:09.007136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:09.301575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.071566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1910:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:10.071770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:10.072916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:10.073025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:10.106758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.313382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.562773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.842922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.091632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.352590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.628051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.928722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.291378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:12.291515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:12.291902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:12.292418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4180], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:12.292485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:12.297702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:12.469917Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2803:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:12.519502Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2863:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:14.087669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:42.058556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:42.131281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:42.135537Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:42.135793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:42.135924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.427933Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.505875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.505961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.562854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.662077Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.285811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.285862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.285890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.287783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.288199Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.350595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.887255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:47.152548Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:47.158796Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:47.162345Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:47.189337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:47.189447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.227424Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:47.229398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.395554Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:47.395636Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.396788Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.397455Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.398069Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.398521Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.398673Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.398751Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.398976Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.399083Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.399169Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.413989Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.622033Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:47.675070Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:47.675178Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:47.716228Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:47.716460Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:47.716674Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:47.716734Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:47.716783Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:47.716865Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:47.716927Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:47.716977Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:47.717436Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:47.722836Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.722967Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2137:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.747663Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2185:2629] 2026-02-19T08:25:47.748914Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2185:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:47.818241Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2266:2670] 2026-02-19T08:25:47.820353Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:47.834216Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.834290Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Creating table 2026-02-19T08:25:47.834415Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:47.878363Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2323:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.882859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.891284Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.891447Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.905839Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:47.969227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:48.113464Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:48.419004Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:48.549672Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:48.549785Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2272:2674] Owner: [2:2271:2673]. Column diff is empty, finishing 2026-02-19T08:25:49.148646Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... und or you don't have access permissions } 2026-02-19T08:26:20.729873Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4149:3725], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:20.729995Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:26:20.734284Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4166:3730], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:20.736936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:26:20.778731Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4148:3724], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-02-19T08:26:21.070117Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4235:3772], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:21.139389Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:4234:3771] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:26:21.422594Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4256:3785]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:26:21.422962Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-02-19T08:26:21.423052Z node 2 :STATISTICS DEBUG: service_impl.cpp:1183: ConnectToSA(), pipe client id = [2:4258:3787] 2026-02-19T08:26:21.423111Z node 2 :STATISTICS DEBUG: service_impl.cpp:1212: SyncNode(), pipe client id = [2:4258:3787] 2026-02-19T08:26:21.423460Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4259:3788] 2026-02-19T08:26:21.423597Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: EvClientConnected, node id = 2, client id = [2:4258:3787], server id = [2:4259:3788], tablet id = 72075186224037894, status = OK 2026-02-19T08:26:21.423673Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4259:3788], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-02-19T08:26:21.423739Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-02-19T08:26:21.423866Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-02-19T08:26:21.423944Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 1, ReplyToActorId = [2:4256:3785], StatRequests.size() = 1 2026-02-19T08:26:21.424192Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-02-19T08:27:23.652168Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4124:3710], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:27:23.652444Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4124:3710], Start read next stream part 2026-02-19T08:27:23.652669Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg3nnf0f01cbz36tqkh45f", SessionId: ydb://session/3?node_id=2&id=MjgyOTVjZjUtMzQwZjJhYzctOGVkMzhjNWItYzk0OTc1Zjc=, Slow query, duration: 62.928243s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:27:23.654400Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4368:3887], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:27:23.657276Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-02-19T08:27:23.657679Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4368:3887], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:27:23.657825Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4368:3887], Start read next stream part 2026-02-19T08:27:23.660989Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4124:3710], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:27:23.661069Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4124:3710], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzdjZWFkYmEtMjczYjY3NDUtNDllYmQ1MTMtMTYyODQzMw==, TxId: 2026-02-19T08:27:23.765993Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4395:3910]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:27:23.766306Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:27:23.766375Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:4395:3910], StatRequests.size() = 1 2026-02-19T08:28:14.770700Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4368:3887], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:28:14.770848Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4368:3887], Start read next stream part 2026-02-19T08:28:14.771036Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg5k498r9ewn5zaymyykvk", SessionId: ydb://session/3?node_id=2&id=YWQ5NDZmZWEtZDkzZGNjMTItZTViOGU0YmYtZGZiODk5NzU=, Slow query, duration: 51.108536s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:28:14.773662Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32180, txId: 18446744073709551615] shutting down 2026-02-19T08:28:14.773951Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4483:3993], ActorId: [2:4484:3994], Starting query actor #1 
[2:4485:3995] 2026-02-19T08:28:14.774023Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4484:3994], ActorId: [2:4485:3995], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:28:14.778371Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4484:3994], ActorId: [2:4485:3995], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=N2EwODc3NzQtMWI3ZmUxNTEtMTIyN2RhYjEtZGM3NGRiNWM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:28:14.779026Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4368:3887], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:28:14.779082Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4111:3706], ActorId: [2:4368:3887], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTUzMTFhMzMtNTFlZWUwYTUtMjcxNGUyOTctMjAwOWQwNzQ=, TxId: 2026-02-19T08:28:14.850928Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4502:4009]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:14.851128Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:28:14.851158Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:4502:4009], StatRequests.size() = 1 2026-02-19T08:28:14.994653Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4484:3994], ActorId: [2:4485:3995], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2EwODc3NzQtMWI3ZmUxNTEtMTIyN2RhYjEtZGM3NGRiNWM=, TxId: 2026-02-19T08:28:14.994732Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4484:3994], ActorId: [2:4485:3995], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2EwODc3NzQtMWI3ZmUxNTEtMTIyN2RhYjEtZGM3NGRiNWM=, TxId: 2026-02-19T08:28:14.995062Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4483:3993], ActorId: [2:4484:3994], Got response [2:4485:3995] SUCCESS 2026-02-19T08:28:14.995310Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:28:15.021139Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:28:15.021237Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3180:3447] |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect >> KqpSystemView::FailResolve |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk >> KqpSystemView::NodesRange1 >> KqpSystemView::FailNavigate >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> KqpSystemView::PartitionStatsSimple >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] >> KqpSysColV0::SelectRowById >> KqpSystemView::CompileCacheQueriesOrderByDesc >> KqpSysColV1::StreamSelectRange >> KqpSystemView::PartitionStatsOrderByDesc >> KqpSysColV1::SelectRowById >> KqpSystemView::Join >> KqpSysColV1::SelectRange >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView >> KqpSystemView::PartitionStatsRange3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] Test command err: 2026-02-19T08:22:40.639214Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608488866419878217:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:40.639819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:22:40.685498Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:22:40.703448Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:22:41.063309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:22:41.085974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:22:41.086066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:22:41.134292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:22:41.245944Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:41.250128Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608488866419878160:2081] 1771489360628402 != 1771489360628405 2026-02-19T08:22:41.288734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:22:41.346989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/ynks/002f41/r3tmp/yandexyv9oV1.tmp 2026-02-19T08:22:41.347008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f41/r3tmp/yandexyv9oV1.tmp 2026-02-19T08:22:41.347152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f41/r3tmp/yandexyv9oV1.tmp 2026-02-19T08:22:41.347221Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:22:41.381651Z INFO: TTestServer started on Port 15288 GrpcPort 11256 PQClient connected to localhost:11256 2026-02-19T08:22:41.610163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:41.618666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:22:41.645309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:22:41.734610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:43.969229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488879304781088:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.969418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.969829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488879304781108:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.969884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608488879304781109:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.970039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:22:43.976412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:22:44.016397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-02-19T08:22:44.016811Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608488879304781112:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:22:44.260151Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608488883599748472:2642] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:22:44.297626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.340501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:22:44.442453Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608488883599748480:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:22:44.444589Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZmUyYjMxOS1hMDk1ODkyNi00NjdjZTlmMS00N2JkZWMyNg==, ActorId: [1:7608488879304781076:2334], ActorState: ExecuteState, LegacyTraceId: 01khtfx1zg5cbv7knnsc6czs2d, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:22:44.447256Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:22:44.447730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608488883599748757:2820] 2026-02-19T08:22:45.637819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608488866419878217:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:22:45.637888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-02-19T08:22:49.964986Z :WriteToTopic_Demo_11_Table INFO: TTopicSdkTestSetup started 2026-02-19T08:22:50.001244Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:22:50.034682Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608488909369552721:2910] connected; active server actors: 1 2026-02-19T08:22:50.034943Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-02-19T08:22:50.037127Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-02-19T08:22:50.037274Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-02-19T08:22:50.054236Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:22:50.055444Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_00000000000000 ... PendingWrites: 0 2026-02-19T08:28:19.466584Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][0][StateIdle] Try persist 2026-02-19T08:28:19.466650Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-02-19T08:28:19.466663Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.466673Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.466687Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.466698Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-02-19T08:28:19.466729Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:28:19.466741Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.466762Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.466776Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.466786Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:28:19.478453Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-02-19T08:28:19.478496Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.478513Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.478537Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.478567Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-02-19T08:28:19.478618Z node 15 
:PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:28:19.478631Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.478642Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.478658Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.478668Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][1][StateIdle] Try persist 2026-02-19T08:28:19.566767Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:28:19.566797Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.566784Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:28:19.566808Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.566819Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.566826Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.566833Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.566839Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][0][StateIdle] Try persist 2026-02-19T08:28:19.566852Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.566865Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:28:19.566880Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-02-19T08:28:19.566888Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.566896Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.566905Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.566912Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-02-19T08:28:19.578556Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-02-19T08:28:19.578587Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][{1, {1, 281474976715674}, 
100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.578598Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.578614Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.578626Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-02-19T08:28:19.578662Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:28:19.578670Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.578679Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.578693Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.578702Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][1][StateIdle] Try persist 2026-02-19T08:28:19.667174Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:28:19.667213Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.667227Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.667249Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.667266Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][0][StateIdle] Try persist 2026-02-19T08:28:19.667322Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-02-19T08:28:19.667334Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.667346Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.667362Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.667372Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-02-19T08:28:19.667416Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:28:19.667431Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.667441Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-02-19T08:28:19.667451Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.667457Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:28:19.678950Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-02-19T08:28:19.678986Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.679002Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.679023Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.679038Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-02-19T08:28:19.679085Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:28:19.679098Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.679107Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:28:19.679121Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:28:19.679131Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][1][StateIdle] Try persist >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> TObjectStorageListingTest::SchemaChecks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: 2026-02-19T08:28:15.541877Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490306285765726:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:15.550399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:15.802001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:15.802080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:15.837319Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:15.853555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:15.855849Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not 
loaded 2026-02-19T08:28:15.858402Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490306285765693:2081] 1771489695539559 != 1771489695539562 2026-02-19T08:28:15.962476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:15.962510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:15.962518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:15.962661Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:16.120353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:16.392381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:16.398902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:16.465033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.563821Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:16.603472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.771554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.846834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:18.663999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490319170669469:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:18.664107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:18.664933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490319170669479:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:18.664980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.082079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.129075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.164083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.196962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.232286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.277367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.348475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.391903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.495618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490323465637653:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.495698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.496262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490323465637658:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.496313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490323465637659:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.496486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.500878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:19.514854Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490323465637662:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:19.590960Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490323465637715:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:20.541239Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490306285765726:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:20.541317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:21.464485Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489701457, txId: 281474976710673] shutting down >> TLocksTest::Range_CorrectDot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: 2026-02-19T08:28:16.120514Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490313812721527:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:16.120596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:16.337038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:16.367207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:16.367322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:16.429165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490313812721496:2081] 1771489696118868 != 1771489696118871 2026-02-19T08:28:16.440743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:16.446229Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:16.534398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:16.563299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:16.563336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:16.563343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:16.563438Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:17.006576Z node 
1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:17.011848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:17.058768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.130854Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:17.211101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.362214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.428236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.345651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490326697625258:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.345776Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.346337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490326697625268:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.346389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.685745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.717115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.748331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.778513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.812337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.850883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.902236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.991450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.087501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490330992593436:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.087608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.088076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490330992593441:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.088125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490330992593442:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.088224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.092400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:20.104686Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490330992593445:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:20.194030Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490330992593498:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:21.125523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490313812721527:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:21.125594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpKnn::Int8VectorKnnPushdown [GOOD] >> KqpSysColV0::UpdateAndDelete >> KqpSysColV1::InnerJoinTables [GOOD] >> KqpSystemView::FailResolve [GOOD] >> KqpSystemView::Sessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2026-02-19T08:27:53.447671Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490211769863977:2199];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:53.447926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:53.708457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.739707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:53.739799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:53.792978Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490211769863807:2081] 1771489673424154 != 1771489673424157 2026-02-19T08:27:53.800578Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:53.810120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:53.882691Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.900304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:53.900335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:53.900353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:53.900474Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:54.088955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:54.095810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:54.103992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:54.174440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:54.447323Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:58.445164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490211769863977:2199];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:58.445218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:08.683685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:28:08.683715Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:19.727540Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490322983342205:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.727593Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:19.833405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:19.836044Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:19.836131Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.836827Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:19.849790Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490322983342169:2081] 1771489699710965 != 1771489699710968 2026-02-19T08:28:19.883314Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.978304Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:19.978328Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2026-02-19T08:28:19.978335Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:19.978415Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:20.017814Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:20.195169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:20.202890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:20.222910Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:20.238633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.750127Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpSysColV1::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2026-02-19T08:27:41.863210Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490161059334030:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:41.863844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:42.056819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:42.086545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:42.086657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:42.148468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:42.188437Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:42.292349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-02-19T08:27:42.399471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:42.410213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:42.507631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:42.641410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:42.698141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:42.871380Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:45.284476Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:45.284590Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:45.355269Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:45.361636Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490178401085315:2081] 1771489665255316 != 1771489665255319 2026-02-19T08:27:45.397363Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:45.397444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:45.401389Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:45.481802Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:45.606251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:45.618819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:45.637868Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:27:45.642191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.698549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.739382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:49.085188Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490197691164850:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:49.085248Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:49.096340Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:49.170785Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490197691164812:2081] 1771489669084265 != 1771489669084268 2026-02-19T08:27:49.179931Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:49.193633Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:49.193748Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:49.196965Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:49.344889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:27:49.370317Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:49.375381Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:49.425395Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:49.485192Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:52.906777Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.909691Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:52.993258Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:52.993334Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:52.995599Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490209770757462:2081] 1771489672835729 != 1771489672835732 2026-02-19T08:27:53.007544Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:53.021926Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:53.172274Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:53.214939Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:53.222058Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, ... 
91Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:05.434203Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:05.456104Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:05.527531Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:05.648026Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:09.330582Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490283531188345:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:09.330739Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:09.349641Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:09.429616Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:09.432405Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490283531188309:2081] 1771489689329468 != 1771489689329471 2026-02-19T08:28:09.462391Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:09.462494Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:09.475002Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:09.567557Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:09.719065Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:09.739859Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:09.829157Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:09.912116Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:13.672880Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608490298116820784:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:13.672951Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:13.692222Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:13.783799Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:13.785446Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490298116820753:2081] 1771489693671728 != 1771489693671731 2026-02-19T08:28:13.819014Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:13.819123Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:13.833537Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:13.856417Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:14.044464Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:14.052549Z node 9 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:14.065326Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:14.129386Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:14.190943Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:18.678267Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490320718775988:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.678325Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.717934Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:18.820438Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:18.820580Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:18.837802Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490320718775942:2081] 1771489698676686 != 1771489698676689 2026-02-19T08:28:18.850292Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:18.867794Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:18.965808Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:19.173538Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:19.179408Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:28:19.194205Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:19.200150Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.267767Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.333799Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView >> KqpSystemView::FailNavigate [GOOD] >> KqpSysColV1::SelectRowAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Int8VectorKnnPushdown [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:14.200476Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:14.312813Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:14.330557Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:14.330636Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:14.331087Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:14.619417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:14.619555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:14.694360Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489691500734 != 1771489691500738 2026-02-19T08:28:14.702925Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:14.749840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:14.870166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:15.214037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:15.214143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:15.214190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:15.214991Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:15.227066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:15.530130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:15.635025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.906696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.283554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.574702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.410914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1910:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:17.411078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:17.412134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:17.412235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:17.446048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.648242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.905547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:18.186748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:18.449666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:18.731006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.059114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.392830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.807732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.807873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.808298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.808370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.808455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2803:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:19.814808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:19.983406Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2805:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:20.034948Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2865:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:21.972831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: 2026-02-19T08:28:18.179363Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490319285948666:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.179441Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.381184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:18.402011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:18.402109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:18.457839Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490319285948635:2081] 1771489698177859 != 1771489698177862 2026-02-19T08:28:18.466288Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:18.470840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:18.597673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:18.597694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:18.597701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:18.597809Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:18.623713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.124553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:19.131711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:19.178634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.187499Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.321696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.472284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.535066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.459960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490332170852400:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:21.460119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:21.460463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490332170852410:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:21.460505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:21.845208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.876252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.910302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.946402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.987602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.047321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.122436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.215015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.306181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336465820583:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.306308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.306422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336465820588:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.308970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336465820590:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.309030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.310674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:22.322842Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490336465820591:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:22.406671Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490336465820643:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:23.185825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490319285948666:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.185897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: 2026-02-19T08:28:18.618720Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490320937426447:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.619856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.880168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:18.927110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:18.927211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:18.973842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490320937426411:2081] 1771489698612671 != 1771489698612674 2026-02-19T08:28:18.974858Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:18.985248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.140605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:19.140627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:19.140633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:19.140723Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:19.154911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.624924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:19.631717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:19.636205Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.697210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.882679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.049665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.110228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.187576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490338117297482:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.187670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.187970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490338117297492:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.187999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.540628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.583032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.619035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.660288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.725471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.763952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.796546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.850763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.956406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490338117298358:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.956503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.956975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490338117298363:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.957018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490338117298364:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.957235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.960981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:22.974375Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490338117298367:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:23.072650Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490342412265714:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:23.619258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490320937426447:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.619559Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:24.606988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:28:24.801151Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7608490346707233401:4048], for# user0@builtin, access# SelectRow 2026-02-19T08:28:24.801299Z node 1 :KQP_EXECUTER ERROR: kqp_table_resolver.cpp:274: TxId: 281474976710675. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 30] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:28:24.813802Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=MWJkMDNmODctNTJmZTJkNjQtNzExNzVjZC1hMTQ4ZWQxYg==, ActorId: [1:7608490346707233372:2542], ActorState: ExecuteState, LegacyTraceId: 01khtg7eps0xms90trz0232sk4, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Failed to resolve table `/Root/.sys/partition_stats` status: AccessDenied." 
issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-02-19T08:28:24.814168Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489704800, txId: 281474976710674] shutting down >> KqpSystemView::PartitionStatsSimple [GOOD] >> KqpSysColV0::SelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: 2026-02-19T08:28:16.181956Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490312869143204:2200];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:16.182259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:16.409255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:16.442867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:16.442975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:16.490177Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490312869143031:2081] 1771489696173297 != 1771489696173300 2026-02-19T08:28:16.504816Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:16.510055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:16.584689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:16.598107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:16.598133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:16.598142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:16.598277Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:17.015985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:17.021655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:17.036089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:28:17.185416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 1 2026-02-19T08:28:20.207856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490330049013407:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.207974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.208326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490330049013419:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.208396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490330049013420:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.208558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:20.212861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:20.233174Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490330049013423:2484], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:28:20.347104Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490330049013476:2581] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ydb-cpp-sdk/dev 2026-02-19T08:28:21.181933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490312869143204:2200];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:21.182012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:24.891056Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489704886, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: 2026-02-19T08:28:18.611339Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490320422913613:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.611404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.865886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:18.921219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:18.921339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:18.959123Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490320422913576:2081] 1771489698607523 != 1771489698607526 2026-02-19T08:28:18.974254Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:18.974303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.097970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:19.097992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:19.097997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:19.098083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:19.143283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.559495Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:19.630035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.658101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.788471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.960687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.033904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.882781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490333307817334:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:21.882934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:21.890050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490333307817344:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:21.890132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.227855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.272977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.323739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.367028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.398033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.458500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.498793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.545994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.650044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490337602785513:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.650133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.650558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490337602785519:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.650580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490337602785520:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.650644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.655924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:22.681810Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490337602785523:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:22.751517Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490337602785574:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:23.611517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490320422913613:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.611580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::StreamSelectRowById >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> TLocksTest::GoodNullLock [GOOD] >> KqpSysColV1::StreamSelectRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: 2026-02-19T08:28:19.394980Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490324890715383:2219];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.396247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:19.435486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:19.729581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:19.729681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.746869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.768827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.836388Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:19.839603Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490324890715202:2081] 1771489699376242 != 1771489699376245 2026-02-19T08:28:19.901335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:19.901362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:19.901390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:19.901502Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:20.013182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:20.373828Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:20.436898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:20.496797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.677946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.854577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.948788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.100340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490342070586266:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.100497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.101450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490342070586276:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.101520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.438716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.471446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.506051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.567739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.599987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.672929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.714217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.772246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.851768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490342070587151:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.851878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.852215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490342070587156:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.852281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490342070587157:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.852333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.855766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:23.866592Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490342070587160:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:23.962642Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490342070587211:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:24.393695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490324890715383:2219];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:24.393769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:25.591349Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7608490350660522125:3979], for# user0@builtin, access# DescribeSchema 2026-02-19T08:28:25.591385Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7608490350660522125:3979], for# user0@builtin, access# DescribeSchema 2026-02-19T08:28:25.607441Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490350660522113:2545], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:28:25.608980Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZDY2ODA3MzctYzYxOGRlMGQtODBhMTBmZi1kMmRiN2U4Yg==, ActorId: [1:7608490350660522106:2541], ActorState: ExecuteState, LegacyTraceId: 01khtg7fgydmdgnzzrsx1stw3e, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/.sys/partition_stats]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: 2026-02-19T08:28:19.039444Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490323594953446:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.040043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:19.062127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:19.311263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.335402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:19.335521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.377194Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490323594953397:2081] 1771489699017018 != 1771489699017021 2026-02-19T08:28:19.393491Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:19.393951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.522014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:19.522033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:19.522039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:19.522145Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:19.584989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:20.058400Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:20.085370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:20.155746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.351920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.510298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.587914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.699691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336479857160:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.699814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.700153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336479857170:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.700208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.148405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.194347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.275876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.306527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.338212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.377929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.422568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.473352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.557544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490340774825341:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.557628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.557929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490340774825346:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.557983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490340774825347:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.558126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.562450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:23.577332Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490340774825350:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:23.658792Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490340774825401:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:24.026091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490323594953446:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:24.026145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] Test command err: 2026-02-19T08:28:18.597563Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490319274011680:2065];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.597646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.886694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:18.886794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:18.918978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:18.941591Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:18.943648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:18.943943Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490319274011656:2081] 1771489698596072 != 1771489698596075 2026-02-19T08:28:19.108404Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.125807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:19.125847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:19.125855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:19.125987Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:19.615972Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.631553Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:19.642154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:19.690037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:19.854792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.014151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.086642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.186467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336453882717:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.186566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.187033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336453882727:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.187091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.551410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.596205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.637636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.674483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.721481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.763211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.802095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.852532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.943010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336453883608:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.943091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.943373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336453883613:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.943400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490336453883614:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.943499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:22.947454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:22.964600Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490336453883617:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:23.062430Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490340748850964:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:23.597003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490319274011680:2065];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.597091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] >> KqpSystemView::ReadSuccess >> KqpSysColV1::SelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:08.486370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:08.610089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:08.629054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:08.629144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:08.629674Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:08.904391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:08.904540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:08.988335Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489685458670 != 1771489685458674 2026-02-19T08:28:08.997534Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:09.043260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:09.160123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:09.538310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:09.538382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:09.538428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:09.539000Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:09.551078Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:09.882184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:09.970813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.260132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.612897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.890009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.638303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1910:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:11.638500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:11.639452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:11.639568Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:11.671594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:11.883497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.125993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.404890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.634025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.882955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:13.144462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:13.439329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:13.771859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:13.771946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:13.772253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:13.772304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:13.772345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2802:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:13.776731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:13.939713Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2804:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:14.001515Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2864:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:15.352883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.513048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSysColV1::SelectRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: 2026-02-19T08:28:19.806644Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490326055055427:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.813526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:19.840311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:20.131372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:20.131472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:20.188139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:20.198949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:20.289881Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:20.367250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:20.393044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:20.393066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:20.393073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-02-19T08:28:20.393145Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:20.818154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:20.962863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:20.971993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:21.072980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.224142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.458899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.530039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.469966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490343234926411:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.470077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.470427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490343234926421:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.470492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.753008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.788746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.824842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.860750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.897301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.975710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.023028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.067943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.145893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347529894594:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.145985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.146200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347529894599:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.146259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347529894600:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.146313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.150447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:24.164114Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490347529894603:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:24.267464Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490347529894654:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:24.805401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490326055055427:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:24.805539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:26.239318Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489706230, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: 2026-02-19T08:28:19.963063Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490325844278836:2259];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.963208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:20.256800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:20.256920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:20.320014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:20.339157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:20.370211Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:20.384557Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490325844278606:2081] 1771489699938895 != 1771489699938898 2026-02-19T08:28:20.522437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:20.522460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:20.522494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:20.522620Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:20.617801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:20.957856Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:21.126203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:21.162044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:21.212321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.365912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.540197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.636093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.757630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490343024149668:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.757760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.758201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490343024149678:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.758239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.021927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.055662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.089372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.127765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.187906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.229317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.275789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.318012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.410275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347319117850:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.410374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.410746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347319117855:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.410784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347319117856:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.410891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.415306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:24.427149Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490347319117859:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:24.500102Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490347319117910:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:24.959878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490325844278836:2259];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:24.959949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::NodesRange2 >> KqpSysColV0::SelectRowAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: 2026-02-19T08:28:19.745724Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490324969929982:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.746337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:20.154437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:20.154568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:20.202224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:20.225846Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490324969929926:2081] 1771489699730281 != 1771489699730284 2026-02-19T08:28:20.240035Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:20.245959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:20.370617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:20.370730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:20.370750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:20.389532Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:20.402909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:20.758196Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:20.974348Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:21.023609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:21.108605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.327928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.512097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.636809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.574738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490342149800992:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.574906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.575388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490342149801002:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.575455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.923053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.955477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.993302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.027983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.057747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.099381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.132314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.181084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.275282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490346444769168:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.275372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.275711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490346444769173:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.275745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490346444769174:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.275845Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.280291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:24.296991Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490346444769177:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:24.391189Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490346444769228:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:24.736411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490324969929982:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:24.736466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:26.595362Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489706616, txId: 281474976715673] shutting down >> KqpSysColV0::InnerJoinSelect >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: 2026-02-19T08:28:20.845574Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490326988498678:2206];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:20.849950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:21.229821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.296464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.296571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.386620Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:21.395716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.441247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:21.600987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:21.601007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:21.601013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:21.601076Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:21.846019Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-02-19T08:28:22.108801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:22.205930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.335279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.500070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.588273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.395922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490344168369541:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.396094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.396422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490344168369551:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.396455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.690699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.727294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.769118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.807915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.845178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.894783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.931569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.984687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.066052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348463337721:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.066202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.066607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348463337726:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.066645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348463337727:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.066781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.070685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:25.081216Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490348463337730:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:25.174865Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490348463337781:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:25.844720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490326988498678:2206];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:25.844802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:26.980077Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489707015, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] Test command err: 2026-02-19T08:28:20.956191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490327059992310:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:20.956234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:21.280471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.316712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.316836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.379131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.389359Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:21.417871Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490327059992281:2081] 1771489700954727 != 1771489700954730 2026-02-19T08:28:21.568279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:21.584976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:21.585004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:21.585010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:21.585083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:22.017854Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:22.100772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:22.171003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.321993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.498134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.574564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.550621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490344239863348:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.550766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.551499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490344239863358:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.551540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.872987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.908351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.949583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.993504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.026790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.096786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.125187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.169154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.253990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348534831525:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.254079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.254847Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348534831530:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.254907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348534831531:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.255043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.259322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:25.273797Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490348534831534:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:25.368260Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490348534831585:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:25.956379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490327059992310:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:25.956455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::UpdateAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: 2026-02-19T08:28:21.161075Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490331721634341:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:21.161190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:21.180954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:21.532534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.532635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.542970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.585456Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.762157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:21.762176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:21.762181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:21.762264Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:21.776833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:21.797106Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:22.169907Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:22.339649Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:22.404214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.547872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.757374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.893602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.695367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490344606537927:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.695468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.695756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490344606537937:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.695781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.058115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.131328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.164084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.204898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.249486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.289340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.331284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.385663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.500265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348901506117:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.500369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.500671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348901506122:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.500714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490348901506123:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.500806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.504599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:25.517081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490348901506126:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:25.615242Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490348901506177:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:26.169820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490331721634341:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:26.170186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV0::InnerJoinTables >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk >> KqpKnn::VectorSearchKnnPushdownFollower-StaleRO [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: 2026-02-19T08:28:21.437301Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490334193220587:2247];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:21.437379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:21.479551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:21.777091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.777205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.782425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.823614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.906315Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:21.909663Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490334193220341:2081] 1771489701358495 != 1771489701358498 2026-02-19T08:28:21.986076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:21.986103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:21.986109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:21.986179Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:22.092011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:22.442430Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:22.578812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:22.589930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:22.688386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.878687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.054557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.142161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.796921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347078124120:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.797008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.797393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347078124130:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.797434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.101281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.140224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.194569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.236375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.270658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.318912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.361074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.447180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.555247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351373092300:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.555366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.558066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351373092305:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.558153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351373092306:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.558287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.563381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:25.581420Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490351373092309:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:25.663592Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490351373092360:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:26.437433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490334193220587:2247];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:26.437503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2026-02-19T08:27:44.729558Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490172788389934:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:44.729660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:44.928698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:44.956554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:44.956652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:44.982904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:45.064685Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:45.067595Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490172788389892:2081] 1771489664727621 != 1771489664727624 2026-02-19T08:27:45.203068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:45.273505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:45.354543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.479625Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:45.528622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:48.239481Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490192281364160:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:48.239556Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:48.271054Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:48.369266Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:48.369359Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:48.375943Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:48.376862Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490192281364131:2081] 1771489668238106 != 1771489668238109 2026-02-19T08:27:48.407900Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:48.544596Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:48.587261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:48.606685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:48.658715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:48.709101Z 
node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:51.915049Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:51.916432Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:51.916630Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490203254330538:2081] 1771489671797302 != 1771489671797305 2026-02-19T08:27:51.916723Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:51.928851Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:51.928938Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:51.950855Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:52.088823Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.153291Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:52.159044Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:52.170707Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:27:52.174967Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:52.244911Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:52.296855Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:55.518721Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490223217864682:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:55.519328Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:55.617872Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:55.637626Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:55.640773Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490223217864622:2081] 1771489675495964 != 1771489675495967 2026-02-19T08:27:55.655158Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:55.655246Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:55.685076Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:55.886714Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:55.887701Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:55.906927Z node 4 :FLAT_TX_SCHEMESHARD WARN: scheme ... 
on type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:08.266185Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:08.283806Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:08.349817Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:08.405173Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.352155Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490294148050047:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:12.352218Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:12.384441Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:12.438801Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:12.440234Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490294148050010:2081] 1771489692351040 != 1771489692351043 2026-02-19T08:28:12.475081Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:12.475159Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:12.477253Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:12.582724Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:12.722565Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:12.742114Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.798852Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.846320Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.039707Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608490314002368534:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:17.039767Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:17.059565Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:17.143575Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:17.145587Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490314002368503:2081] 1771489697038251 != 1771489697038254 2026-02-19T08:28:17.163667Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:17.163779Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:17.194536Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:17.326452Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:17.462338Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:17.482981Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.549621Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.611673Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.212248Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490338797056056:2259];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:22.212583Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:22.227265Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:22.388699Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:22.389527Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:22.392838Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490338797055826:2081] 1771489702188813 != 1771489702188816 2026-02-19T08:28:22.410030Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:22.410137Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:22.452424Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:22.682508Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:22.768149Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:22.782571Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:22.806295Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at 
schemeshard: 72057594046644480 2026-02-19T08:28:22.821933Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.910739Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.991816Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.211478Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpKnn::VectorSearchKnnPushdownFollower+StaleRO [GOOD] >> KqpSysColV1::StreamInnerJoinTables |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QuerySessionsOrderByDesc >> KqpSysColV1::StreamInnerJoinSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] Test command err: 2026-02-19T08:28:20.451336Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490330363767706:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:20.451588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:20.685516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:20.740307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:20.740441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:20.806295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:20.807057Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:20.825859Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490330363767480:2081] 1771489700429713 != 1771489700429716 2026-02-19T08:28:20.909241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:20.995113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:20.995137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:20.995143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:20.995230Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:21.449845Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:21.549731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:21.557861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:21.642281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.816924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.009613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.089387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.233173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347543638542:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.233317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.238584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347543638552:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.238700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.633599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.669030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.768480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.802147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.836548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.880332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.924690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.975766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.068943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351838606722:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.069015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.069074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351838606727:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.069366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351838606729:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.069411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.073513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:25.088314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490351838606730:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:25.163507Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490351838606782:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:25.451317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490330363767706:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:25.451377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::PartitionStatsRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] Test command err: 2026-02-19T08:28:21.400974Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490334582393564:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:21.401305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:21.506207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:21.724491Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:21.729306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490334582393513:2081] 1771489701392168 != 1771489701392171 2026-02-19T08:28:21.780945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.781072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.789508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.797057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.979513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:21.979544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:21.979551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:21.979662Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:22.033784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:28:22.429935Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:22.575833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:22.582348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:22.651976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.840924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.037750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.120793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.050555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351762264585:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.050671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.051179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351762264595:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.051215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.367851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.404202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.451509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.489817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.547851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.595982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.653035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.708634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.787735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351762265470:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.787848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.788245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351762265475:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.788314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490351762265476:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.788447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.792339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:25.807365Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490351762265479:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:25.875019Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490351762265530:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:26.400465Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490334582393564:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:26.400561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::QueryStatsScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdownFollower-StaleRO [GOOD] Test command err: 2026-02-19T08:28:11.736144Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490288764052831:2146];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:11.736245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:11.921893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:11.946125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:11.946250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:11.985235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:12.006441Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:12.007644Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490288764052723:2081] 1771489691730889 != 1771489691730892 2026-02-19T08:28:12.086407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:12.086428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:12.086434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:12.086545Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:12.193553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:12.480366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:12.541292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.681398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.781957Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:12.823561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:12.893298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:14.715113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490301648956498:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:14.715259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:14.715621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490301648956508:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:14.715687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.011567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.039582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.066749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.095458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.124748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.159400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.192643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.234418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.306300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490305943924675:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.306352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.306493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490305943924680:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.306545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490305943924681:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.306601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.310417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:15.321917Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490305943924684:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:15.379755Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490305943924735:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:16.735939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490288764052831:2146];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:16.736009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:16.842303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.060253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) ... SELECT from partition_stats for /Root/TestTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId = 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' ... SELECT from partition_stats for /Root/TestTable , attempt 1 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId = 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' 2026-02-19T08:28:26.902022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:28:26.902064Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... SELECT from partition_stats for /Root/TestTable , attempt 2 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId = 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' ... 
SELECT from partition_stats for /Root/TestTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] >> KqpSystemView::PartitionStatsRange2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdownFollower+StaleRO [GOOD] Test command err: 2026-02-19T08:28:12.137644Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490293869727760:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:12.138102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:12.363789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:12.363909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:12.401630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:12.415285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:12.455055Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:12.457724Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490293869727710:2081] 1771489692134809 != 1771489692134812 2026-02-19T08:28:12.530295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:12.530316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:12.530324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:12.530441Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:12.606312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:12.895088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:12.939540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:13.060325Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:13.161234Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:13.191889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:13.256207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.334556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490306754631474:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.334655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.334928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490306754631484:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.334976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.641609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.674765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.707662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.737423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.780816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.811802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.847690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.885698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.957895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490306754632350:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.958007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.958083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490306754632355:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.958309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490306754632357:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.958346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:15.961689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:15.972482Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490306754632358:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:16.076291Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490311049599706:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:17.138074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490293869727760:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:17.139618Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:17.497523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.687379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) ... SELECT from partition_stats for /Root/TestTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId = 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' ... SELECT from partition_stats for /Root/TestTable , attempt 0 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' ... SELECT from partition_stats for /Root/TestTable , attempt 1 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' 2026-02-19T08:28:27.292487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:28:27.292522Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... SELECT from partition_stats for /Root/TestTable , attempt 2 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/TestTable' >> KqpSystemView::NodesSimple >> KqpSystemView::NodesOrderByDesc [GOOD] >> KqpSysColV0::SelectRange >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table >> KqpSystemView::PartitionStatsFollower >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:28:13.402730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:13.476938Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:13.492098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:13.492167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:13.492586Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:13.773177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:13.773297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:13.842157Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489690938513 != 1771489690938517 2026-02-19T08:28:13.849606Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:13.896550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:14.024575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:14.369150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:14.369208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:14.369241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:14.369765Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:14.382663Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:14.685120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:14.748664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.020679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.358133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.642512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.486468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1910:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:16.486671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:16.487819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:16.487954Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:16.519875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.743881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.996785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.280041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.531317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:17.803629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:18.111808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:18.420559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:18.794300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:18.794499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:18.794907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2802:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:18.795342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2803:4180], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:18.795406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:18.801768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:18.983146Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2806:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:19.041073Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2866:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:20.816058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.802754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSystemView::NodesRange1 [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> DataShardTxOrder::ReadWriteReorder |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [GOOD] |94.4%| [TA] $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesOrderByDesc [GOOD] Test command err: 2026-02-19T08:28:18.424550Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490319595874611:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.425114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.468739Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490321404987793:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.469667Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.502765Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490319606793689:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.533059Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490319160738051:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.533128Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.552446Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.587929Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608490320292176968:2193];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.588101Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:19.073835Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.074154Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.074395Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.087039Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.109795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.183444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:19.183569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.185971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:19.186961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.190391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:19.190474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.190618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:19.190692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.190805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:19.190842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.325757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.326304Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:28:19.330198Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:28:19.330228Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:28:19.330241Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-02-19T08:28:19.369489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.370576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.370730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.370818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.421286Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.426262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.422930Z node 3 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:19.437914Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.493794Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:19.498605Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.498673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.504114Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.549911Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.564764Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.600885Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.921811Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.976761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:20.046623Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:20.048576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:20.048603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:20.048611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:20.048716Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:21.699931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:21.896856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976725658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.617031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.168605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.339180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.456536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490319595874611:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.464024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:23.483708Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490321404987793:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.484243Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:23.510405Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608490319606793689:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.510687Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:23.532794Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608490319160738051:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.532848Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:23.588232Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608490320292176968:2193];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.588292Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:25.631642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7608490349660647821:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.631776Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.632140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490349660647831:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.632186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.078979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.213545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.282461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.380605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.465381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.532946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.601819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.675192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.814359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490353955616108:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.814458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.814782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490353955616113:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.814783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490353955616114:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.814837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.818121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:26.845840Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490353955616117:2410], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725670 completed, doublechecking } 2026-02-19T08:28:26.909325Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490353955616192:4586] txid# 281474976725671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate >> KqpSysColV0::UpdateAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2026-02-19T08:22:54.789032Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007345s 2026-02-19T08:22:54.991400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:54.991473Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:55.256900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:22:56.513478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:22:56.714613Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:56.715201Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:56.715765Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8544791310528981681 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:56.749562Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:56.750315Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:56.750897Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13388774305278510700 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:56.822253Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:56.822767Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:56.822978Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12069467042102471503 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-02-19T08:22:56.826657Z node 4 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 
1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-02-19T08:22:56.903603Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:922} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-02-19T08:22:56.904439Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:3014} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-02-19T08:22:56.904708Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:270} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ynks/003701/r3tmp/tmphGqg7t/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6224529048003678861 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# ... 
2026-02-19T08:27:20.105908Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:20.106268Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:26.357221Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:26.357598Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:32.467140Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:32.467538Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:32.623746Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:32.624113Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:38.723996Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:38.724389Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:45.199635Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:45.200056Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:51.580848Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:51.581255Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:57.903432Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:27:57.903866Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool 
/dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:04.242419Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:04.242700Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:10.741212Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:10.741630Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:16.983742Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:16.984069Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:23.836063Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:23.836454Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:30.463311Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-02-19T08:28:30.463678Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} >> KqpSysColV1::StreamSelectRowById [GOOD] |94.4%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpSystemView::ReadSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: 2026-02-19T08:28:19.877998Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490325941824191:2093];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.878545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:19.977857Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490325581489714:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.989569Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490324570288583:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:19.978010Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:19.989608Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:20.129385Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608490328258944833:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:20.129454Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:20.894050Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:21.048103Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.048281Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:21.055374Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.061380Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.057848Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.057951Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:21.057983Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:21.059999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.065842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:21.057961Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:21.125847Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:21.134070Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:21.134246Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:21.179268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.179366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.181224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.181272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.186591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.186658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.187071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.187126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.201383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.201846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.367659Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-02-19T08:28:21.367778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.373434Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:21.395022Z node 1 
:HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:28:21.395064Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:28:21.399026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.406946Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.406957Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.407196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.420168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.420307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.422706Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.426442Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.467398Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:21.489451Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:28:21.500835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.793593Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:21.803160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:22.113109Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:22.126965Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:22.175188Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:22.310389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:22.310423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:22.310437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:22.310519Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:23.874570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:24.017178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.328403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.579323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.706069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.876148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490325941824191:2093];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:24.876210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:24.940911Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490325581489714:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:24.940964Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:24.990325Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608490324570288583:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:24.990455Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:25.130919Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608490328258944833:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:25.130999Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:26.934855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490356006597433:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.934967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.935301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490356006597443:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:26.935338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:27.326098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.375384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.414838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.516408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.576300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.639376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.689037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.761442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.848079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490360301565706:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:27.848170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:27.848454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490360301565711:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:27.848527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490360301565712:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:27.848594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:27.852367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:27.872741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490360301565715:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:27.931675Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490360301565790:4613] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:29.928330Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489709917, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] Test command err: 2026-02-19T08:28:26.466863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490355854777241:2149];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:26.466943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:26.692580Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:26.721895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:26.721995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:26.785082Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:26.790081Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490355854777130:2081] 1771489706460865 != 1771489706460868 2026-02-19T08:28:26.803095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:26.892259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:26.892287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:26.892293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:26.892397Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:26.974079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:27.270103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:27.323603Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.472392Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:27.484644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.651578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.728758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.582628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490368739680896:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.582725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.583384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490368739680906:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.583439Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.869726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.909443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.946584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.986704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.024207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.098557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.162238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.226042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.336978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490373034649083:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.337194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490373034649088:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.337310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.337661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490373034649091:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.337722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.341393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:30.355274Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490373034649090:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:30.463774Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490373034649143:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:31.469846Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490355854777241:2149];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:31.474946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:32.075229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: 2026-02-19T08:28:25.180567Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490351128738292:2264];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:25.180643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:25.197696Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:25.572120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:25.572244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:25.634553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:25.640195Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:25.735229Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:25.741697Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490351128738056:2081] 1771489705151984 != 1771489705151987 2026-02-19T08:28:25.798845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:25.798873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:25.798883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-02-19T08:28:25.798966Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:25.875038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:26.166259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:26.221525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:26.226733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:26.286801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.436415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.628184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.714034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:28.635678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490364013641818:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:28.635800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:28.636090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490364013641828:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:28.636157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:28.959459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:28.989220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.018693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.047634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.074378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.110791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.145251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.215841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.301812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490368308609998:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.301901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.302120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490368308610003:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.302158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490368308610004:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.302281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:29.305810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:29.316499Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490368308610007:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:29.397056Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490368308610058:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:30.176248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490351128738292:2264];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:30.176308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardTxOrder::ReadWriteReorder [GOOD] >> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: 2026-02-19T08:28:27.439517Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490357401450182:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:27.439565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:27.652287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:27.652413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:27.690186Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:27.705574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:27.724351Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:27.741832Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490357401450151:2081] 1771489707438036 != 1771489707438039 2026-02-19T08:28:27.839523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:27.839558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:27.839567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:27.839645Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:27.987907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:28.245360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:28.296279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:28.430901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:28.535857Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:28.587947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:28.658054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.593554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490370286353923:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.593673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.597831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490370286353933:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.597908Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:30.952601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.000810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.049606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.105159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.145647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.195462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.280337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.339457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.473770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490374581322109:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.474130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.477047Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490374581322114:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.477107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490374581322115:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.477256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.481399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:31.495000Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490374581322118:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:31.581002Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490374581322171:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:32.444734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490357401450182:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:32.444810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:33.423711Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489713462, txId: 281474976710673] shutting down >> KqpSysColV0::SelectRowAsterisk [GOOD] >> DataShardOutOfOrder::TestOutOfOrderLockLost >> TLocksTest::CK_Range_GoodLock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: 2026-02-19T08:28:28.150370Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490364745580474:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:28.151955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:28.188168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:28.424417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:28.424530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:28.429171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:28.468913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:28.507211Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:28.508737Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490364745580437:2081] 1771489708147190 != 1771489708147193 2026-02-19T08:28:28.557030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:28.557052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:28.557065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:28.557142Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:28:28.661110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:28.942867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:28.950182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:29.020893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.151642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.151780Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:29.317479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.389757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.386240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490377630484208:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.386389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.388738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490377630484218:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.388798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:31.717403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.752426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.785829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.813766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.842211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.876501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.930996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.978193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.070890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490381925452385:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.070976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.071451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490381925452391:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.071497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490381925452390:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.071528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.075600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:32.089517Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490381925452394:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:32.180441Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490381925452447:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:33.151762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490364745580474:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:33.151841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:33.754048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:28:33.924935Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489713918, txId: 281474976710674] shutting down >> KqpSysColV0::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2026-02-19T08:28:33.881845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:33.881913Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:33.882806Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:33.897203Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:33.897563Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:33.897823Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:33.946687Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:33.962098Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:33.962294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:33.970423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:33.970508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:33.970566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:33.971060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:33.971175Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:33.971267Z node 1 
:TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:34.050164Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:34.095229Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:34.095463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:34.095573Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:34.095634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:34.095675Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:34.095710Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:34.095901Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:34.095951Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:34.096281Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:34.096402Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:34.096533Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:34.096594Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:34.096660Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:34.096700Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:34.096746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:34.096782Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:34.096834Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:34.096947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:34.096993Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:34.097037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:34.104827Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 
\"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:34.104920Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:34.105037Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:34.105231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:34.105304Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:34.105379Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:34.105425Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:34.105461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:34.105499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:34.105532Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:34.105967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:34.106012Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:34.106058Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:34.106091Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:34.106167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:34.106197Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:34.106231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:34.106267Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:34.106305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:34.121763Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:34.121858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:34.121913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:34.121959Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:34.122045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not 
sending time cast registration request in state WaitScheme 2026-02-19T08:28:34.122625Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:34.122687Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:34.122735Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:34.122865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:34.122916Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:34.123132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:34.123180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:34.123223Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:34.123265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:34.127375Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:34.127476Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:34.127755Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:34.127799Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:34.127868Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:34.127931Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:34.127972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:34.128017Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:34.128056Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000001: ... 
ions 2026-02-19T08:28:35.172020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-02-19T08:28:35.172235Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:238:2229], Recipient [1:238:2229]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:35.172295Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:35.172350Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:35.172408Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:35.172439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:35.172470Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:12] in PlanQueue unit at 9437184 2026-02-19T08:28:35.172500Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2026-02-19T08:28:35.172530Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.172554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2026-02-19T08:28:35.172577Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2026-02-19T08:28:35.172633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2026-02-19T08:28:35.173509Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2026-02-19T08:28:35.173557Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.173586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2026-02-19T08:28:35.173613Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2026-02-19T08:28:35.173638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2026-02-19T08:28:35.173730Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.173761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2026-02-19T08:28:35.173799Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:35.173826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2026-02-19T08:28:35.173902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:12] is the new logically complete end at 9437184 2026-02-19T08:28:35.173936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:12] is the new logically incomplete end at 9437184 2026-02-19T08:28:35.173965Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:12] at 9437184 2026-02-19T08:28:35.173999Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.174024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:35.174045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2026-02-19T08:28:35.174069Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2026-02-19T08:28:35.174118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.174142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2026-02-19T08:28:35.174198Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2026-02-19T08:28:35.174231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2026-02-19T08:28:35.174259Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.174288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2026-02-19T08:28:35.174310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2026-02-19T08:28:35.174333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2026-02-19T08:28:35.174363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.174446Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2026-02-19T08:28:35.174474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2026-02-19T08:28:35.174496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2026-02-19T08:28:35.174519Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.174555Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2026-02-19T08:28:35.174585Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BlockFailPoint 2026-02-19T08:28:35.174623Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BlockFailPoint 2026-02-19T08:28:35.174662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.174688Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BlockFailPoint 2026-02-19T08:28:35.174711Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2026-02-19T08:28:35.174734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2026-02-19T08:28:35.175254Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2026-02-19T08:28:35.175314Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:28:35.175377Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.175411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2026-02-19T08:28:35.175437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2026-02-19T08:28:35.175463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2026-02-19T08:28:35.175628Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is DelayComplete 2026-02-19T08:28:35.175656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2026-02-19T08:28:35.175683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2026-02-19T08:28:35.175725Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2026-02-19T08:28:35.175762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-02-19T08:28:35.175785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2026-02-19T08:28:35.175811Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:12] at 9437184 has finished 2026-02-19T08:28:35.175852Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:35.175882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:35.175911Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:35.175942Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:35.190572Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2026-02-19T08:28:35.190682Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2026-02-19T08:28:35.190970Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-02-19T08:28:35.191035Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000005:12] at 9437185 on unit CompleteOperation 2026-02-19T08:28:35.191106Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:35.191160Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-02-19T08:28:35.191740Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2026-02-19T08:28:35.191792Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2026-02-19T08:28:35.191834Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:35.191879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2026-02-19T08:28:35.191937Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:35.191979Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> DataShardScan::ScanFollowedByUpdate >> KqpSystemView::Join [GOOD] >> KqpSysColV0::InnerJoinTables [GOOD] >> DataShardTxOrder::RandomDotRanges_DelayRS >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] Test command err: 2026-02-19T08:28:30.479559Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490372980956283:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:30.481462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:30.520576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:30.840324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:30.855398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:30.855530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.896605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.957227Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:30.964524Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490372980956220:2081] 1771489710471861 != 1771489710471864 2026-02-19T08:28:31.097776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:31.158401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:31.158423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:31.158435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:31.158533Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:31.497944Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:31.707681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:31.714863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:31.727216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:28:34.286040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490390160826373:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.286199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.293846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490390160826385:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.293932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490390160826386:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.294120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.301221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:34.329922Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490390160826389:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:28:34.471855Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490390160826440:2578] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: 2026-02-19T08:28:28.848302Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490364170798093:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:28.849136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.000400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.014288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:29.014408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:29.087093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:29.090440Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:29.093078Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490364170798049:2081] 1771489708844064 != 1771489708844067 2026-02-19T08:28:29.190446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:29.210975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:29.210998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:29.211004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:29.211109Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:29.674674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:29.689391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:29.758269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:29.858914Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:29.961745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.144604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.223787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.366709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490381350669111:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.366823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.367635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490381350669121:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.367699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.779700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.820874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.878118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.925781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.968464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.001200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.041824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.090105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.194051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490385645637283:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.194129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.194456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490385645637288:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.194502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490385645637289:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.194610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.205346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:33.228516Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490385645637292:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:33.321464Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490385645637343:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:33.849818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490364170798093:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:33.849888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2026-02-19T08:27:52.211085Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490209839068441:2144];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:52.211372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:52.513832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.545800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:52.545914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:52.630440Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:52.633721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:52.634053Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490209839068335:2081] 1771489672200624 != 1771489672200627 2026-02-19T08:27:52.737791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.894004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:52.965571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:27:53.088573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.154673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.220670Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:55.928725Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490220395041311:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:55.928777Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:55.959934Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:56.034879Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:56.037907Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490220395041272:2081] 1771489675927293 != 1771489675927296 2026-02-19T08:27:56.046492Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:56.046572Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:56.061381Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:56.176382Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:56.258395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:56.265385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:56.280026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:27:56.284437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.354726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.428660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.408858Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:59.440318Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490236988613016:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:59.478261Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:59.529801Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:59.604655Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:59.608824Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490236988612771:2081] 1771489679354433 != 1771489679354436 2026-02-19T08:27:59.634322Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:59.634394Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:59.644569Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:59.795062Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:59.822228Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:59.839693Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.920710Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.964475Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:03.228674Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490255627425946:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:03.229055Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:03.254529Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:03.321908Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:03.354924Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:03.354993Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:03.363424Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:03.552391Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:03.598168Z node 4 ... 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:15.873658Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:15.883582Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.944807Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:15.989674Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.158702Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490328523950549:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:20.159446Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:20.207127Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:20.293140Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:20.293239Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:20.306036Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490328523950506:2081] 1771489700156559 != 1771489700156562 2026-02-19T08:28:20.326507Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:20.329924Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:20.377579Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:20.628233Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:20.636358Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:20.655005Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.733218Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.804864Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.415914Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7608490349015895057:2204];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:25.457593Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:25.469847Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:25.616819Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:25.621861Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490349015894869:2081] 1771489705391791 != 1771489705391794 2026-02-19T08:28:25.633969Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:25.634082Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:25.671207Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:25.744271Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:25.978215Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:25.987047Z node 9 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:26.008890Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.083248Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.146899Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.401912Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490373323761016:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:30.413196Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:30.454032Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:30.549988Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:30.553294Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490373323760972:2081] 1771489710396959 != 1771489710396962 2026-02-19T08:28:30.563533Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:30.563646Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.597530Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.641803Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:30.916242Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:30.930125Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:28:30.946105Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:30.951785Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.032060Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.093330Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: 2026-02-19T08:28:29.053727Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490365820393897:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:29.055477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.261934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.287868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:29.287969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:29.328908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490365820393847:2081] 1771489709049709 != 1771489709049712 2026-02-19T08:28:29.347225Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:29.355719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:29.466797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:29.478387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:29.478411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:29.478426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-02-19T08:28:29.478516Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:29.981703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:29.989520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:30.053493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.060602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.236560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.398970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.476668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.518154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490378705297616:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.518280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.518656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490378705297626:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.518731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.864595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.906387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.946391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.984939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.019371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.078242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.112063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.168618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.257950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490383000265792:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.258007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.258898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490383000265798:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.258942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490383000265797:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.258963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.262314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:33.275546Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490383000265801:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:33.334334Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490383000265852:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:34.051917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490365820393897:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:34.051984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> TxOrderInternals::OperationOrder [GOOD] >> KqpSysColV1::UpdateAndDelete [GOOD] >> KqpSysColV1::StreamInnerJoinTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: 2026-02-19T08:28:29.527968Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490365539109141:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:29.528018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.870697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.888547Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:29.889694Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490365539109104:2081] 1771489709516617 != 1771489709516620 2026-02-19T08:28:29.903598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:29.903725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:29.935629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.072510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:30.072532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:30.072540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:30.072640Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:30.144380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:30.545597Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.615510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:30.621868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:30.676234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.844349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.015492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.093619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.211890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490382718980172:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.212145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.212573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490382718980182:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.212652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.521316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.563980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.590722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.620871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.651784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.682617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.713875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.772013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.842777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490382718981053:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.842876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.843170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490382718981058:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.843221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490382718981059:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.843319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.846360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:33.856939Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490382718981062:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:33.936036Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490382718981113:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:34.528909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490365539109141:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:34.528979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: 2026-02-19T08:28:21.193863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490334944016950:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:21.193929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:21.479911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:21.521443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:21.521574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:21.611972Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490334944016721:2081] 1771489701146653 != 1771489701146656 2026-02-19T08:28:21.623555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.624202Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:21.733888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:21.762207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:21.762225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:21.762234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:21.762296Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:22.198218Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:22.287969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:22.298372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:22.362598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.504255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.685963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:22.763757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.629597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347828920495:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.629702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.633856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490347828920505:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.633947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.017697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.052508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.093038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.124866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.153405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.206687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.240664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.286952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:25.391164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490352123888672:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.391260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.391530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490352123888677:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.391576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490352123888678:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.391605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:25.394944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:25.405326Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490352123888681:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:25.479137Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490352123888732:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:26.190113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490334944016950:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:26.190252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:27.111844Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489707106, txId: 281474976710673] shutting down waiting... 2026-02-19T08:28:28.278473Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489708273, txId: 281474976710675] shutting down waiting... 2026-02-19T08:28:29.475365Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489709469, txId: 281474976710677] shutting down waiting... 2026-02-19T08:28:30.687424Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489710680, txId: 281474976710679] shutting down waiting... 2026-02-19T08:28:31.935202Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489711930, txId: 281474976710681] shutting down waiting... 2026-02-19T08:28:33.134476Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489713128, txId: 281474976710683] shutting down waiting... 2026-02-19T08:28:34.306167Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489714302, txId: 281474976710685] shutting down waiting... 
2026-02-19T08:28:35.504443Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489715499, txId: 281474976710687] shutting down 2026-02-19T08:28:35.905982Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489715897, txId: 281474976710689] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2026-02-19T08:27:52.302096Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490209370825475:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:52.302178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:52.557834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.605209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:52.605358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:52.672142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490209370825366:2081] 1771489672294357 != 1771489672294360 2026-02-19T08:27:52.688542Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:52.692487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:52.824830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:52.915872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:52.922525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:53.003286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.137525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.201185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:53.305693Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:55.988411Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490223001540810:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:55.991417Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:56.013001Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:56.079245Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:56.082253Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490223001540780:2081] 1771489675986905 != 1771489675986908 2026-02-19T08:27:56.091023Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:56.091101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:56.146029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:56.184036Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:56.324483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:56.334688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:56.348848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.400056Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:56.482590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.474476Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490238443700314:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:59.474599Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:59.573793Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:59.618250Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:59.618342Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:59.631905Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:59.634987Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:59.764619Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:59.882465Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:59.887588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:27:59.904870Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.959829Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:00.005235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:03.084557Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490256053311829:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:03.084710Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:03.109388Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:03.188080Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:03.191382Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490256053311783:2081] 1771489683083174 != 1771489683083177 2026-02-19T08:28:03.227135Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:03.227219Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:03.232776Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:03.319145Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:03.430757Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__oper ... 
T08:28:15.982010Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.042865Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:16.100486Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:20.411807Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490329637418481:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:20.411872Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:20.503178Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:20.665841Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:20.677523Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:20.677631Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:20.681107Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:20.689886Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490329637418438:2081] 1771489700400722 != 1771489700400725 2026-02-19T08:28:20.734582Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:21.009748Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:21.018138Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:21.039133Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.115573Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.171368Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.176460Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:21.453420Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:25.504346Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:25.504460Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:25.631520Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:25.631639Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:25.645965Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490350514895249:2081] 1771489705466990 != 1771489705466993 2026-02-19T08:28:25.671021Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:25.683321Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:25.793977Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:25.950222Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:25.962073Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:25.981444Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.068868Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:26.140421Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.708106Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490369921526666:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:30.708854Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:30.741039Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:30.913947Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490369921526612:2081] 1771489710702419 != 1771489710702422 2026-02-19T08:28:30.944679Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:30.961668Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:30.970936Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:30.971054Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.987558Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:31.266821Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:31.286012Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:31.294808Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.393056Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.443894Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:31.470009Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.710330Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpSystemView::PartitionStatsRanges [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> KqpSysColV1::StreamInnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: 2026-02-19T08:28:29.677408Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490365966738542:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:29.677454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.723155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:30.014259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:30.014382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.056201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.061273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:30.083583Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:30.091827Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490365966738513:2081] 1771489709676150 != 1771489709676153 2026-02-19T08:28:30.174371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:30.174392Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:30.174398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:30.175512Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:30.251995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:30.713381Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.733193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:30.742728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:30.795270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.996612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.178911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.254153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.123948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490383146609576:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.124040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.124915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490383146609586:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.124960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.408989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.449472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.487833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.520740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.557613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.642153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.730597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.781253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.863149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490383146610467:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.863240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.863585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490383146610472:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.863651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490383146610473:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.863786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.868027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:33.887695Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490383146610476:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:33.991843Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490383146610527:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:34.677600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490365966738542:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:34.677688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardTxOrder::RandomPoints_DelayRS_Reboot >> KqpSystemView::QueryStatsScan [GOOD] >> THealthCheckTest::TestStateStorageOk [GOOD] >> THealthCheckTest::TestStateStorageBlue |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] >> KqpSystemView::PartitionStatsRange2 [GOOD] >> KqpSysColV0::SelectRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: 2026-02-19T08:28:29.415348Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490368292611519:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:29.417651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.470987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:29.741793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.758432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:29.758556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:29.799746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:29.839343Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:29.841486Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490368292611486:2081] 1771489709412508 != 1771489709412511 2026-02-19T08:28:29.910515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:29.910529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:28:29.910534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:29.910593Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:30.044342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:30.423631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.481903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:30.490637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:30.550336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.757412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:30.936272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.036551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.826910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490381177515248:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.827040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.831575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490381177515258:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:32.831728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.168262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.207081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.257786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.288287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.328558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.388902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.472385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.514021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.591326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490385472483430:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.591399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.592119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490385472483435:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.592168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490385472483436:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.592273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:33.596233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:33.608612Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490385472483439:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:33.681384Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490385472483490:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:34.414768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490368292611519:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:34.414884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardScan::ScanFollowedByUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: 2026-02-19T08:28:30.344388Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490373379459335:2212];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:30.344497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:30.569648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:30.654475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:30.654610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.664293Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:30.665477Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490373379459161:2081] 1771489710331047 != 1771489710331050 2026-02-19T08:28:30.684105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:31.009775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:31.030313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:31.030334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:31.030343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:31.030420Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:31.346977Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:31.635777Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:31.642325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:31.720292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:31.928664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.112239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.188535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.241131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490390559330224:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.241266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.241738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490390559330234:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.241805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.618259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.661539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.729332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.776484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.810239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.847616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.891014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.942691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.031784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490394854298400:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.031897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.032321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490394854298405:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.032375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490394854298406:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.032510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.036171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:35.053459Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490394854298409:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:35.126596Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490394854298460:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:35.343064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490373379459335:2212];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:35.343132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:37.097281Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489717130, txId: 281474976710673] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: 2026-02-19T08:28:31.006457Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490377053393515:2189];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:31.006546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:31.053198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:31.343817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:31.343940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:31.410665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:31.427362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:31.464143Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:31.673790Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:31.695908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:31.695928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:31.695934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:31.696029Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:28:32.011261Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:32.301984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:32.318587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:32.391189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.587938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.794608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.883528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.778998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490389938297097:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.779086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.779811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490389938297106:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.779854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.106388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.139262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.172917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.202939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.280234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.320218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.352140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.431042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.500470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490394233265277:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.500525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.501335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490394233265282:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.501369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490394233265283:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.501457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.505271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:35.516221Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490394233265286:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:35.613379Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490394233265337:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:36.007443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490377053393515:2189];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:36.007524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:37.432429Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489717426, txId: 281474976710673] shutting down >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: 2026-02-19T08:28:30.555256Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490372557166583:2061];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:30.555295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:30.966198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:31.003834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:31.003933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:31.083716Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:31.089633Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490372557166561:2081] 1771489710544105 != 1771489710544108 2026-02-19T08:28:31.109127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:31.306412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:31.330513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:31.330540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:31.330546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:31.330641Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:31.570058Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:31.911825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:31.934303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:31.988740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.132314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.311287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.401698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.323452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490389737037632:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.323555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.323906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490389737037642:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.323948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.616728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.650435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.678878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.712560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.743849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.789617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.846385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.897741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.968972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490389737038513:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.969045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.969301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490389737038518:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.969337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490389737038519:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.969432Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.972582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:34.982890Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490389737038522:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:35.072212Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490394032005869:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:35.555766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490372557166583:2061];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:35.555827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:37.298846Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489717333, txId: 281474976715673] shutting down >> DataShardTxOrder::ImmediateBetweenOnline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: 2026-02-19T08:28:31.138326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490375757246847:2217];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:31.138407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:31.527694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:31.564431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:31.564735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:31.634405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:31.643824Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:31.660207Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490375757246659:2081] 1771489711102814 != 1771489711102817 2026-02-19T08:28:31.787858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:31.814659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:31.814678Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:31.814687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:31.814750Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:32.140662Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:32.389434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:32.452968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.622577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.829035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.920561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.728860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490388642150434:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.729019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.729446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490388642150444:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:34.729520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.004085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.033340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.067576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.098565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.130689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.166807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.267648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.312529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.403581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392937118614:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.403649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.403914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392937118619:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.403953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392937118620:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.404075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.407122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:35.417980Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490392937118623:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:35.486385Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490392937118674:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:36.135047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490375757246847:2217];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:36.135122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:37.674879Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489717284, txId: 281474976715673] shutting down 2026-02-19T08:28:37.784746Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489717780, txId: 281474976715676] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: 2026-02-19T08:28:31.640295Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490375448688428:2164];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:31.640705Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:31.921562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:32.057424Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:32.061425Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490375448688269:2081] 1771489711574869 != 1771489711574872 2026-02-19T08:28:32.077794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:32.077873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:32.082787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:32.237884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:32.292919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:32.292939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:32.292948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-02-19T08:28:32.293091Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:32.645878Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:32.787339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:32.803122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:32.878069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.145213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.323637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.399530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.343772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392628559338:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.343885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.344284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392628559348:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.344364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.710959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.749988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.786066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.900145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.934907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.981694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.022710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.078026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.173767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490396923527526:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.173882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.174357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490396923527531:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.174417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490396923527532:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.174535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.179208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:36.195808Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490396923527535:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:36.290322Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490396923527586:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:36.636921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490375448688428:2164];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:36.636991Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:38.073146Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489718066, txId: 281474976715673] shutting down >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink >> DataShardOutOfOrder::TestSnapshotReadPriority ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2026-02-19T08:28:36.668507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:36.668565Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:36.669532Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:36.682073Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:36.682440Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:36.682694Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:36.727472Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:36.736666Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:36.736858Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:36.738358Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:36.738476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:36.738531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:36.738903Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:36.739003Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:36.739081Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:36.813120Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:36.845764Z node 1 :TX_DATASHARD 
INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:36.845985Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:36.846104Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:36.846169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:36.846202Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:36.846234Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:36.846398Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:36.846442Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:36.846736Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:36.846834Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:36.846950Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:36.846989Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:36.847047Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:36.847093Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:36.847151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:36.847197Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:36.847248Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:36.847347Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:36.847389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:36.847428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:36.854172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:36.854291Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event 
TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:36.854387Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:36.854578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:36.854638Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:36.854707Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:36.854748Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:36.854790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:36.854834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:36.854863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:36.855142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:36.855178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:36.855214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:36.855245Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:36.855312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:36.855341Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:36.855382Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:36.855414Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:36.855450Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:36.874537Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:36.874629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:36.874689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:36.874732Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:36.874829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:36.875386Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 
2026-02-19T08:28:36.875439Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:36.875477Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:36.875631Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:36.875663Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:36.875796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:36.875839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:36.875870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:36.875922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:36.879516Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:36.879583Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:36.879826Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:36.879873Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:36.879926Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:36.879962Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:36.880003Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:36.880058Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:36.880094Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
on::Execute at 9437186 2026-02-19T08:28:38.883860Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:38.883924Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2026-02-19T08:28:38.883960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2026-02-19T08:28:38.883998Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2026-02-19T08:28:38.884078Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2026-02-19T08:28:38.884110Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2026-02-19T08:28:38.884141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2026-02-19T08:28:38.884169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2026-02-19T08:28:38.884434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is DelayComplete 2026-02-19T08:28:38.884473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2026-02-19T08:28:38.884520Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2026-02-19T08:28:38.884562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2026-02-19T08:28:38.884617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2026-02-19T08:28:38.884648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2026-02-19T08:28:38.884683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437186 has finished 2026-02-19T08:28:38.884741Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:38.884780Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-02-19T08:28:38.884821Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-02-19T08:28:38.884865Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-02-19T08:28:38.885075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:238:2229], Recipient [1:238:2229]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.885112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.885162Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:38.885193Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 1 active planned 1 
immediate 0 planned 1 2026-02-19T08:28:38.885228Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2026-02-19T08:28:38.885254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2026-02-19T08:28:38.885285Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2026-02-19T08:28:38.885320Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2026-02-19T08:28:38.885368Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2026-02-19T08:28:38.885399Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2026-02-19T08:28:38.885430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2026-02-19T08:28:38.885596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is DelayComplete 2026-02-19T08:28:38.885627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2026-02-19T08:28:38.885655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2026-02-19T08:28:38.885683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2026-02-19T08:28:38.885740Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2026-02-19T08:28:38.885767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2026-02-19T08:28:38.885794Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437184 has finished 2026-02-19T08:28:38.885821Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:38.885844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:38.885870Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:38.885918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:38.886120Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:348:2314], Recipient [1:348:2314]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.886163Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.886211Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-02-19T08:28:38.886241Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:38.886272Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437185 for 
ReadTableScan 2026-02-19T08:28:38.886299Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2026-02-19T08:28:38.886325Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2026-02-19T08:28:38.886359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2026-02-19T08:28:38.886392Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2026-02-19T08:28:38.886419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2026-02-19T08:28:38.886445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2026-02-19T08:28:38.886626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is DelayComplete 2026-02-19T08:28:38.886659Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2026-02-19T08:28:38.886686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2026-02-19T08:28:38.886716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2026-02-19T08:28:38.886756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2026-02-19T08:28:38.886784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2026-02-19T08:28:38.886810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437185 has finished 2026-02-19T08:28:38.886838Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:38.886860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-02-19T08:28:38.886886Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-02-19T08:28:38.886909Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-02-19T08:28:38.900150Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-02-19T08:28:38.900219Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-02-19T08:28:38.900252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2026-02-19T08:28:38.900362Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:104:2137], exec latency: 4 ms, propose latency: 5 ms 2026-02-19T08:28:38.900469Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-02-19T08:28:38.901974Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:38.902027Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:38.902059Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2026-02-19T08:28:38.902111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 4 ms, propose latency: 6 ms 2026-02-19T08:28:38.902151Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:38.902298Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:38.902341Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:38.902369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2026-02-19T08:28:38.902434Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 4 ms, propose latency: 6 ms 2026-02-19T08:28:38.902471Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: 2026-02-19T08:28:32.107224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490380105008116:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:32.107690Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:32.398200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:32.409037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:32.409134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:32.506635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:32.508839Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490380105008056:2081] 1771489712092966 != 1771489712092969 2026-02-19T08:28:32.536568Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:32.777350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:32.846279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:32.846305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:32.846313Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:32.846377Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:33.123192Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:33.344702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:33.396965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.585991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.745579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.820021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.751272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392989911832:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.751369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.755506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392989911841:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.755592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.086486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.121333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.157052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.191051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.224689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.290489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.343230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.393955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.469357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490397284880011:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.469493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.469602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490397284880016:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.469914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490397284880018:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.469971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.473673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:36.485848Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490397284880019:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:36.570795Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490397284880071:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:37.103844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490380105008116:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:37.103916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] Test command err: 2026-02-19T08:28:31.420176Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490374208005398:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:31.420221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:31.486743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:31.828803Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490374208005349:2081] 1771489711410910 != 1771489711410913 2026-02-19T08:28:31.841886Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:31.845083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:31.845180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:31.848545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:31.868158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:32.034399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:32.034437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:32.034445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:32.034540Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:32.097659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:28:32.445849Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:32.565131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:32.631434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.820956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.020026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.122999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.186646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490391387876403:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.186787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.187206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490391387876413:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.187291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.529779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.560251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.592557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.624091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.664705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.698876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.738367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.796801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.886245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490391387877287:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.886332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.886599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490391387877292:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.886621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490391387877293:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.886642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.890410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:35.901678Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490391387877296:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:28:36.007752Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490395682844643:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:36.420683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490374208005398:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:36.420770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:37.515616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> DataShardTxOrder::ZigZag_oo >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> DataShardTxOrder::DelayData >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW >> DataShardTxOrder::ImmediateBetweenOnline_Init >> KqpSystemView::NodesSimple [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> DataShardTxOrder::ZigZag >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite >> KqpSystemView::NodesRange2 [GOOD] >> DataShardTxOrder::ZigZag_oo8_dirty >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> TLocksTest::BrokenSameShardLock [GOOD] >> DataShardTxOrder::RandomPointsAndRanges >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> DataShardTxOrder::RandomPoints_DelayData >> THealthCheckTest::TestStateStorageYellow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: 2026-02-19T08:28:32.293486Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490381037124778:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:32.293960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:32.354173Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490378525764983:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:32.356184Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-02-19T08:28:32.381047Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490380552969703:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:32.381484Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:32.976368Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:32.985788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:32.990330Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:33.199332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:33.199607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:33.203397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:33.203479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:33.203597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:33.203627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:33.229841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:33.243305Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:33.300438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:33.303431Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:33.308860Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:28:33.308904Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:28:33.327502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:33.369209Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:33.391402Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:33.391484Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:33.400961Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:33.398227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:33.619955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:33.682862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:33.682886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:33.682894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:33.682987Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:34.412491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:34.523050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.790140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.067962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:35.183613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.172471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490402511963454:2362], DatabaseId: /Root, PoolId: default, Failed to fetch 
pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.172584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.172881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490402511963464:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.172962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.285318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490381037124778:2094];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:37.285402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:37.350305Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490378525764983:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:37.350375Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:37.377948Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608490380552969703:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:37.378031Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:37.475739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.515242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.558830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.601553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.633337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.671968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.716549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.770070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.841841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490402511964543:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.841927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.842015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490402511964548:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.842091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490402511964550:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.842146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.845570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:37.863584Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490402511964552:2421], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:28:37.948111Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490402511964621:4694] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:39.575446Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489719570, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2026-02-19T08:28:40.725572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:40.725622Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:40.726385Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:40.736136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:40.736384Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:40.736591Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:40.777770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:40.783985Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:40.784129Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:40.785476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:40.785584Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:40.785650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:40.786123Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:40.786194Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:40.786255Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:40.853183Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:40.881132Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:40.881312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:40.881400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:40.881445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 
2026-02-19T08:28:40.881488Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:40.881518Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:40.881674Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.881725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.881993Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:40.882075Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:40.882176Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:40.882217Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:40.882277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:40.882319Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:40.882372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:40.882430Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:40.882477Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:40.882576Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.882622Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.882659Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:40.890075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:40.890143Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:40.890223Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:40.890372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:40.890422Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:40.890476Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:40.890514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:40.890543Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:40.890574Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:40.890602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:40.890827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:40.890855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:40.890889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:40.890919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:40.890959Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:40.890982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:40.891022Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:40.891046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:40.891064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:40.903271Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:40.903352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:40.903432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:40.903482Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:40.903586Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:40.904136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.904189Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.904227Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:40.904349Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 
2026-02-19T08:28:40.904372Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:40.904507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:40.904557Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:40.904610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:40.904647Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:40.908001Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:40.908079Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:40.908267Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.908301Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.908356Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:40.908401Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:40.908438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:40.908471Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:40.908498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
28:41.883738Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.883905Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:28:41.883944Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:28:41.883969Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:28:41.884011Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.884057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2026-02-19T08:28:41.884103Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:28:41.884159Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-02-19T08:28:41.884191Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.884417Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.884488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2026-02-19T08:28:41.884533Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:28:41.884621Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-02-19T08:28:41.884673Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.884859Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.884891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2026-02-19T08:28:41.884941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:28:41.884979Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-02-19T08:28:41.885021Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.885195Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.885241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:14] at 9437184 on unit FinishPropose 2026-02-19T08:28:41.885293Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose 
latency: 1 ms, status: COMPLETE 2026-02-19T08:28:41.885386Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.885564Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.885652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2026-02-19T08:28:41.885705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:28:41.885810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-02-19T08:28:41.885874Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.886097Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.886143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2026-02-19T08:28:41.886199Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:28:41.886253Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-02-19T08:28:41.886286Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.886498Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.886543Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2026-02-19T08:28:41.886638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:28:41.886681Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.886816Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:28:41.886865Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.886915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2026-02-19T08:28:41.886970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:28:41.887025Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-02-19T08:28:41.887053Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2026-02-19T08:28:41.887333Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-02-19T08:28:41.887403Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:41.887458Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2026-02-19T08:28:41.887611Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-02-19T08:28:41.887656Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:41.887690Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2026-02-19T08:28:41.887799Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-02-19T08:28:41.887839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:41.887871Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2026-02-19T08:28:41.887998Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-02-19T08:28:41.888035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:41.888067Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2026-02-19T08:28:41.888170Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-02-19T08:28:41.888212Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:41.888240Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2026-02-19T08:28:41.888338Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-02-19T08:28:41.888395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:41.888437Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2026-02-19T08:28:41.888521Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-02-19T08:28:41.888556Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:41.888599Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2026-02-19T08:28:41.888683Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-02-19T08:28:41.888735Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:41.888784Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: 2026-02-19T08:28:29.175505Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490367970067555:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:29.175578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.252026Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490368234927958:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:29.257318Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.280829Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490366398337057:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:29.282071Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.304007Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608490369115745005:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:29.304045Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:29.793931Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.794416Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.795583Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:29.793799Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.797802Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.873800Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:29.997455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:29.997539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:29.998610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:29.998686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.005091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:30.005171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.005694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:30.005808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.006986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:30.007040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:30.058494Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-02-19T08:28:30.058648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.115263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.123077Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:28:30.123126Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 
Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:28:30.123147Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:28:30.128045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.128244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.134268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:30.195171Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.194287Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:30.194564Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:30.195452Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:30.202300Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:30.215636Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:30.243281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:30.257946Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.325433Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.465848Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.474863Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:30.630397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:30.630424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:30.630435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:30.630522Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:30.657790Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:32.462583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976735657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:32.718297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.427773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.760037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.950551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.176843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490367970067555:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:34.176906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:34.251122Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490368234927958:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:34.251188Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:34.261577Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608490366398337057:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:34.261655Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:34.304069Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608490369115745005:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:34.304124Z node 5 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:36.386907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490398034840776:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.387030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.391356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490398034840786:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.391446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.724307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.792348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.846884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.912199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.969067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.031682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.086921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.200435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.324661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490402329809061:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.324750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.324795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490402329809066:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.324988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490402329809068:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.325044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.329005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976735670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:37.350320Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490402329809069:2409], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976735670 completed, doublechecking } 2026-02-19T08:28:37.419893Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490402329809145:4581] txid# 281474976735671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:39.201669Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489719196, txId: 281474976735673] shutting down >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2026-02-19T08:27:59.139037Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490236892308639:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:59.139088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:59.412491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:59.412613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:59.458615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:59.493968Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490236892308511:2081] 1771489679132157 != 1771489679132160 2026-02-19T08:27:59.500813Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:59.512770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:59.729968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:59.742830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:59.747602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:27:59.824998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.949251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:59.997493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:00.151893Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:02.658035Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490252346322467:2062];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:02.658087Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:02.718977Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:02.791310Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:02.792602Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490252346322446:2081] 1771489682657537 != 1771489682657540 2026-02-19T08:28:02.825615Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:02.825698Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:02.829390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:02.994224Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:03.017293Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:03.038671Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:03.086805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:03.133912Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:06.484586Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490268003183489:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:06.485621Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:06.503763Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:06.592964Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:06.605560Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:06.605647Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:06.623716Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:06.703894Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:06.819948Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:06.841303Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:06.903504Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:06.953580Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:10.295525Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490285142275011:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:10.295573Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:10.309830Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:10.390976Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:10.393486Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608490285142274982:2081] 1771489690294655 != 1771489690294658 2026-02-19T08:28:10.404805Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:10.404911Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:10.431714Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:10.567295Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:10.639393Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:10.655143Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__o ... 
3: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:23.053911Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:23.085959Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:28:23.097851Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.202012Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.272512Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:27.558243Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490360770009323:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:27.558350Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:27.583736Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:27.669596Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:27.671174Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490360770009278:2081] 1771489707556906 != 1771489707556909 2026-02-19T08:28:27.684435Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:27.684543Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:27.713964Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:27.824533Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:27.962466Z node 8 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:27.968034Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:28:27.981540Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:28.048889Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:28.107354Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:32.773517Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:32.777622Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:32.800855Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:32.802923Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7608490378454837783:2081] 1771489712492473 != 1771489712492476 2026-02-19T08:28:32.840810Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:32.840892Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:32.854881Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:33.044169Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:33.224733Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:28:33.242366Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:28:33.259921Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:33.265407Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.361041Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.465435Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.652894Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.855500Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490403515584297:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:37.855581Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:37.873233Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:28:37.974515Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:37.976314Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608490403515584266:2081] 1771489717854412 != 1771489717854415 2026-02-19T08:28:37.991447Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:37.991565Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:38.027857Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:38.043608Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:28:38.323517Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:38.344586Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:38.407017Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:38.509728Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> VDiskIOTest::HugeBlobIOCount [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageYellow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:27:09.494998Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:09.496067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:09.497023Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:09.619262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:09.620937Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:09.626842Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:09.627240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:09.627512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:09.629754Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:09.629852Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:10.047222Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:10.166744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:10.166864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:10.167626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:10.167736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:10.212667Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:27:10.213374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:10.213837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:10.283813Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:10.335352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:11.135206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:11.135269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:11.135304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:11.135759Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-02-19T08:27:18.185400Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:18.186523Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:18.186960Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:771:2353], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:27:18.197005Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:18.198872Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:18.200313Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:776:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:18.200667Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:18.200860Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:18.202262Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:18.202389Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:18.545889Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:18.631227Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:18.631371Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:18.631836Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:18.631911Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:18.695711Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:27:18.696310Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:18.696770Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:18.762638Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:18.815745Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:22.587018Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:22.591184Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-02-19T08:27:22.592907Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:27:22.596787Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:22.596851Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:22.596894Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:22.597758Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:22.600939Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2026-02-19T08:27:22.601276Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:27:22.627914Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:22.628066Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:22.655854Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-02-19T08:27:22.656534Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:22.776721Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2026-02-19T08:27:22.777500Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-b954-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } issue_log { id: "RED-3c4a-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-b954-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "RED-5995-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-3c4a-1231c6b1" type: "DATABASE" level: 1 } location { id: 3 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-02-19T08:27:27.519502Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:27.528824Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:27.532174Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TC ... 
2-19T08:28:36.103709Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:36.103777Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:36.104032Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:36.104103Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:36.104349Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:36.104415Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:36.104659Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:36.104748Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:36.105006Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:36.105076Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:36.195798Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-02-19T08:28:36.196384Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2026-02-19T08:28:36.196526Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2026-02-19T08:28:36.196653Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2026-02-19T08:28:36.196770Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 19 Cookie 19 2026-02-19T08:28:36.196881Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 20 Cookie 20 2026-02-19T08:28:36.196999Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 21 Cookie 21 2026-02-19T08:28:36.197115Z node 14 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 22 Cookie 22 2026-02-19T08:28:36.197498Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.198299Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.198561Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.199473Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.199674Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.199849Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-02-19T08:28:36.200077Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.200237Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.200392Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.318553Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:36.339958Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:36.391841Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:36.455190Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:36.470224Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:36.486662Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:36.502361Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:36.594040Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:36.610574Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:37.682261Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:37.682357Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:37.682418Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:37.683258Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:37.762510Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.762651Z node 17 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.762708Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.762761Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.762810Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.762862Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.763271Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.766706Z node 15 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.766823Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-17" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 17 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-18" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 18 host: "::1" port: 12005 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-19" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 19 host: "::1" port: 12006 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-20" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 20 host: "::1" port: 12007 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-21" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 21 host: "::1" port: 12008 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-22" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 22 host: "::1" port: 12009 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: 
"YELLOW-7932-1231c6b1-17" reason: "YELLOW-7932-1231c6b1-18" reason: "YELLOW-7932-1231c6b1-19" reason: "YELLOW-7932-1231c6b1-20" reason: "YELLOW-7932-1231c6b1-21" reason: "YELLOW-7932-1231c6b1-22" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1da1" status: YELLOW message: "Multiple rings have unavailable replicas" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-14" type: "STATE_STORAGE_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-b30b-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 3 count: 3 } issue_log { id: "YELLOW-59f1" status: YELLOW message: "Multiple rings have unavailable replicas" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-14" type: "SCHEME_BOARD_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-106b-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 3 count: 3 } issue_log { id: "YELLOW-7532" status: YELLOW message: "Multiple rings have unavailable replicas" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-14" type: "BOARD_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-0632-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "BOARD_NODE" level: 3 listed: 3 count: 3 } location { id: 14 host: "::1" port: 12001 } >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardOutOfOrder::UncommittedReadSetAck >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> VDiskIOTest::HugeBlobIOCount [GOOD] Test command err: Total puts# 5000 Total vdisk writes# 10093 Total log vdisk writes# 5084 Total chunk vdisk writes# 5009 Log vdisk writes per put# 1.0168 Chunk vdisk writes per put# 1.0018 VDisk writes per put# 2.0186 DeviceWrites 10093 WriteLog requests 5084 WriteHugeUser requests 5000 >> DataShardOutOfOrder::TestReadTableWriteConflict |94.5%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2026-02-19T08:28:40.010659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:40.010719Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:40.012574Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:40.026217Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:40.026607Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:40.026854Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:40.075437Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:40.082811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:40.083008Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:40.084722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:40.084851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:40.084919Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:40.085337Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:40.085421Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:40.085496Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:40.160491Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:40.200685Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:40.200896Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:40.201004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:40.201059Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:40.201098Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:40.201133Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:40.201300Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.201348Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.201664Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:40.201789Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:40.201923Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:40.201982Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:40.202037Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:40.202083Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:40.202143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:40.202179Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:40.202227Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:40.202352Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.202408Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.202452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:40.209644Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:40.209756Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:40.209865Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:40.210054Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:40.210126Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:40.210217Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:40.210272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:40.210312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:40.210349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:40.210386Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:40.210696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:40.210743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:40.210790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:40.210827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:40.210890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:40.210928Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:40.210975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:40.211016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:40.211047Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:40.226279Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:40.226353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:40.226428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:40.226477Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:40.226558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:40.227089Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.227145Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.227193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:40.227359Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:40.227419Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:40.227573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:40.227634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:40.227676Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on 
unit WaitForPlan 2026-02-19T08:28:40.227725Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:40.241812Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:40.241912Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:40.242153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.242211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.242295Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:40.242343Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:40.242403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:40.242448Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:40.242502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... SHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.613455Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-02-19T08:28:44.613556Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-02-19T08:28:44.613579Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.613599Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-02-19T08:28:44.613670Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:28:44.613725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.613750Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-02-19T08:28:44.613801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [1:238:2229], Recipient [1:459:2400]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-02-19T08:28:44.613845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:28:44.613887Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3395: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-02-19T08:28:44.613938Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-02-19T08:28:44.613972Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-02-19T08:28:44.614035Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:28:44.614153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-02-19T08:28:44.614178Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.614200Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-02-19T08:28:44.614241Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:28:44.614263Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.614282Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-02-19T08:28:44.614330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:459:2400], Recipient [1:459:2400]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:44.614364Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:44.614405Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-02-19T08:28:44.614441Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:44.614478Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-02-19T08:28:44.614502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-02-19T08:28:44.614541Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-02-19T08:28:44.614568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-02-19T08:28:44.614593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-02-19T08:28:44.614627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-02-19T08:28:44.614655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 
2026-02-19T08:28:44.614677Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-02-19T08:28:44.614704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-02-19T08:28:44.614729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2026-02-19T08:28:44.615188Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-02-19T08:28:44.615233Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:28:44.615303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-02-19T08:28:44.615327Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-02-19T08:28:44.615353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-02-19T08:28:44.615376Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-02-19T08:28:44.615554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-02-19T08:28:44.615580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-02-19T08:28:44.615605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-02-19T08:28:44.615627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-02-19T08:28:44.615662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-02-19T08:28:44.615687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-02-19T08:28:44.615713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-02-19T08:28:44.615736Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:44.615758Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-02-19T08:28:44.615795Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-02-19T08:28:44.615820Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-02-19T08:28:44.616006Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 
0 Seqno# 47} 2026-02-19T08:28:44.616039Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.616079Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-02-19T08:28:44.616173Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:28:44.616210Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.616247Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-02-19T08:28:44.616317Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:44.616343Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.616363Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-02-19T08:28:44.629629Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:44.629693Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-02-19T08:28:44.629767Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:28:44.629819Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:44.629880Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:44.630132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:44.630167Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:44.630196Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::LocksBrokenStats >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table [GOOD] >> 
DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> TKeyValueTest::TestVacuumOnEmptyTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2026-02-19T08:28:46.462503Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:46.465601Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:503} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-02-19T08:28:46.471729Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:46.471802Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:503} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-02-19T08:28:46.477403Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:124} Inline read request KeyValue# 1 Status# OK 2026-02-19T08:28:46.477468Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:136} Expected OK or UNKNOWN and given OK readCount# 0 2026-02-19T08:28:46.477547Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:503} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:36.912972Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:37.002303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:37.018342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:37.018428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:37.018906Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:37.283780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:37.283920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:37.346575Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489714049853 != 1771489714049857 2026-02-19T08:28:37.356753Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:37.400330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:37.502018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:37.790114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:37.803733Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.908427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:38.226893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:38.445615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1120:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:38.445745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2925], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:38.445812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:38.446707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1135:2929], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:38.446794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:38.451234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:38.607586Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1134:2928], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:28:38.687172Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1192:2967] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets ... sending immediate upsert ... waiting for immediate propose ... immediate upsert is blocked 2026-02-19T08:28:39.797309Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2026-02-19T08:28:39.808270Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:909: SelfId: [1:1377:3020], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1267:3020]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1377:3020]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2026-02-19T08:28:39.808943Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:1371:3020], SessionActorId: [1:1267:3020], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1267:3020]. 2026-02-19T08:28:39.809928Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=1&id=N2ZjY2M0YWUtNThlOWI2YzItZDMzZmE2YzMtYzIxMTY2YjQ=, ActorId: [1:1267:3020], ActorState: ExecuteState, LegacyTraceId: 01khtg7xd9a83cmh7x900zwxr6, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1372:3020] from: [1:1371:3020] trace_id# 2026-02-19T08:28:39.810240Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:1372:3020] TxId: 281474976715665. Ctx: { TraceId: 01khtg7xd9a83cmh7x900zwxr6, Database: , SessionId: ydb://session/3?node_id=1&id=N2ZjY2M0YWUtNThlOWI2YzItZDMzZmE2YzMtYzIxMTY2YjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } trace_id# 2026-02-19T08:28:39.813950Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=N2ZjY2M0YWUtNThlOWI2YzItZDMzZmE2YzMtYzIxMTY2YjQ=, ActorId: [1:1267:3020], ActorState: ExecuteState, LegacyTraceId: 01khtg7xd9a83cmh7x900zwxr6, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`." issue_code: 2006 severity: 1 issues { message: "Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting" issue_code: 2006 severity: 1 } } trace_id# 2026-02-19T08:28:39.815117Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1318: SelfId: [1:1344:3022], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1269:3022]TEvDeliveryProblem was received from tablet: 72075186224037889 2026-02-19T08:28:39.815232Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:1336:3022], SessionActorId: [1:1269:3022], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1269:3022]. 2026-02-19T08:28:39.815634Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=1&id=ZjFiNTZiOGYtNDgxMWEzY2ItOTA1NjQxZGMtOGQwYmI5Zg==, ActorId: [1:1269:3022], ActorState: ExecuteState, LegacyTraceId: 01khtg7xa01ebn73ace73929yy, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1337:3022] from: [1:1336:3022] trace_id# 2026-02-19T08:28:39.816107Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:1337:3022] TxId: 281474976715664. Ctx: { TraceId: 01khtg7xa01ebn73ace73929yy, Database: , SessionId: ydb://session/3?node_id=1&id=ZjFiNTZiOGYtNDgxMWEzY2ItOTA1NjQxZGMtOGQwYmI5Zg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } trace_id# 2026-02-19T08:28:39.817018Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=ZjFiNTZiOGYtNDgxMWEzY2ItOTA1NjQxZGMtOGQwYmI5Zg==, ActorId: [1:1269:3022], ActorState: ExecuteState, LegacyTraceId: 01khtg7xa01ebn73ace73929yy, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889." issue_code: 2026 severity: 1 } trace_id# { items { uint32_value: 1 } items { uint32_value: 1 } } ... waiting for SysViewsRoster update finished 2026-02-19T08:28:43.259695Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:43.264905Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:43.268881Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:43.269161Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:43.269199Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:43.466289Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:43.466388Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:43.482443Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489720664551 != 1771489720664555 2026-02-19T08:28:43.484654Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:43.548885Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:43.616081Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:43.895219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:43.908167Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:44.010823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:44.234750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2026-02-19T08:28:44.446552Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:44.446652Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1041:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:44.446744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:44.447731Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1046:2880], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:44.447923Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:44.452019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:44.634362Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1044:2878], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:28:44.669689Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1103:2918] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet 2026-02-19T08:28:45.317442Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1318: SelfId: [2:1219:2959], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1165:2959]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2026-02-19T08:28:41.455673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:41.455728Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:41.456664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:41.469304Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:41.469671Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:41.469941Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:41.517086Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:41.524365Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:41.524544Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:41.526266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:41.526348Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:41.526408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:41.526816Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:41.526901Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:41.526988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:41.594422Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:41.631079Z node 1 
:TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:41.631260Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:41.631350Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:41.631403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:41.631446Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:41.631479Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.631627Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:41.631671Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:41.631963Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:41.632059Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:41.632165Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:41.632215Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:41.632260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:41.632294Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:41.632353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:41.632386Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:41.632430Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:41.632545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:41.632608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:41.632651Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:41.638758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:41.638832Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing 
event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:41.638912Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:41.639057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:41.639138Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:41.639206Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:41.639244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:41.639278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:41.639306Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:41.639335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:41.639583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:41.639620Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:41.639651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:41.639680Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:41.639731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:41.639783Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:41.639816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:41.639845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:41.639867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:41.651774Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:41.651842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:41.651902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:41.651942Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:41.652003Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:41.652456Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 
2026-02-19T08:28:41.652500Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:41.652541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:41.652682Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:41.652715Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:41.652867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:41.652908Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:41.652957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:41.653005Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:41.656619Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:41.656684Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:41.656860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:41.656906Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:41.656945Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:41.656972Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:41.656997Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:41.657024Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:41.657061Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
Check candidate unit PlanQueue at 9437186 2026-02-19T08:28:46.307028Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-02-19T08:28:46.307053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-02-19T08:28:46.307428Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:347:2313]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:28:46.307480Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.307526Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-02-19T08:28:46.307730Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.307787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.307842Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:28:46.307895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-02-19T08:28:46.307931Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.308062Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:28:46.308094Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.308117Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.308153Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:46.308193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-02-19T08:28:46.308244Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.308377Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:28:46.308403Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.308445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.308485Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:46.308526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-02-19T08:28:46.308573Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.308675Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:28:46.308697Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:28:46.308715Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:28:46.308734Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.308757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.308800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:46.308860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-02-19T08:28:46.308885Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.309014Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.309049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.309093Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:46.309136Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:28:46.309158Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.309255Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.309279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.309318Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:46.309363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:46.309412Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.309514Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.309561Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:28:46.309703Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, 
Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-02-19T08:28:46.309757Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.309784Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-02-19T08:28:46.309905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-02-19T08:28:46.309948Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.309977Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-02-19T08:28:46.310040Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-02-19T08:28:46.310065Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.310088Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-02-19T08:28:46.310197Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-02-19T08:28:46.310237Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.310286Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-02-19T08:28:46.310360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:28:46.310396Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.310418Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-02-19T08:28:46.310494Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:46.310518Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.310540Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-02-19T08:28:46.324145Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437186 2026-02-19T08:28:46.324217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.324286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 4 ms 2026-02-19T08:28:46.324348Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:46.324392Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.324662Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:458:2399], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:46.324702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.324734Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2026-02-19T08:28:41.924810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:41.924869Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:41.925678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:41.938408Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:41.938758Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:41.939000Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:41.987008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:41.993801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:41.993982Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:41.995685Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:41.995755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:41.995806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:41.996211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:41.996298Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:41.996392Z node 
1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:42.054408Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:42.086031Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:42.086202Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:42.086295Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:42.086343Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:42.086377Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:42.086407Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.086565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.086611Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.086875Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:42.086965Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:42.087083Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.087118Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:42.087164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:42.087200Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:42.087259Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:42.087293Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:42.087330Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:42.087427Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.087472Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.087512Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:42.094523Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 
\"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:42.094585Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:42.094675Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:42.094834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:42.094905Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:42.094974Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:42.095021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:42.095055Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:42.095087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:42.095119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.095393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:42.095433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:42.095467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:42.095500Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.095561Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:42.095595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:42.095625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:42.095653Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.095675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:42.107734Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:42.107811Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.107854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.107886Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:42.107946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not 
sending time cast registration request in state WaitScheme 2026-02-19T08:28:42.108334Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.108372Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.108403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:42.108499Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:42.108526Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:42.108666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.108698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-02-19T08:28:42.108726Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:42.108750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:42.111498Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:42.111555Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.111723Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.111754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.111788Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.111825Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:42.111857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:42.111884Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2026-02-19T08:28:42.111910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2026-02-19T08:28:42. ... 
latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:46.645285Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:28:46.645306Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.645432Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.645456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:149] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.645488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:46.645528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:46.645550Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.645644Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:46.645665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2026-02-19T08:28:46.645724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:46.645773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:46.645796Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:46.646050Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-02-19T08:28:46.646090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.646141Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-02-19T08:28:46.646246Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-02-19T08:28:46.646286Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.646332Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-02-19T08:28:46.646433Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 
9437186 Flags# 0 Seqno# 46} 2026-02-19T08:28:46.646459Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.646481Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-02-19T08:28:46.646545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-02-19T08:28:46.646583Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.646608Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-02-19T08:28:46.646681Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:28:46.646705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.646743Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-02-19T08:28:46.646798Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:46.646838Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.646867Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-02-19T08:28:46.646950Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:46.646973Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.646995Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-02-19T08:28:46.647050Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:46.647093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:149] at 9437184 on unit CompleteOperation 2026-02-19T08:28:46.647145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [6 : 149] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:46.647197Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-02-19T08:28:46.647228Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2026-02-19T08:28:46.647378Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:46.647403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:151] at 9437184 on unit CompleteOperation 2026-02-19T08:28:46.647438Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [6 : 151] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:46.647473Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-02-19T08:28:46.647499Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:46.647609Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:46.647632Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:152] at 9437184 on unit CompleteOperation 2026-02-19T08:28:46.647676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [6 : 152] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:46.647725Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:28:46.647758Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:46.647875Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:46.647912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:154] at 9437184 on unit CompleteOperation 2026-02-19T08:28:46.647945Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [6 : 154] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:46.647980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:28:46.648004Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:46.648174Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-02-19T08:28:46.648210Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.648237Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-02-19T08:28:46.648340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-02-19T08:28:46.648363Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.648407Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-02-19T08:28:46.648500Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:28:46.648541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.648586Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-02-19T08:28:46.648654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:28:46.648678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:46.648698Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:36.178587Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:36.307508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:36.339668Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:36.339767Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:36.340353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:36.659258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:36.659409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:36.743477Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489713304511 != 1771489713304515 2026-02-19T08:28:36.753491Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:36.799680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:36.888529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:37.204655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:37.218163Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:37.322806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.585286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:37.805413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.805497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1041:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.805587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.806495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1046:2880], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.806723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:37.810403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:37.956443Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1044:2878], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:28:38.025326Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1102:2917] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... performing the first select { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets ... captured readset ... captured readset ... performing an upsert ... performing the second select ... performing the third select ... performing the last upsert and commit 2026-02-19T08:28:40.135620Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=OTM3YTRmOWItNTU2N2VkOTctNDA4ZTc4NDYtZGQzMDU2MDI=, ActorId: [1:1171:2965], ActorState: ExecuteState, LegacyTraceId: 01khtg7xrhaz937nvfk2gq3xw5, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 } trace_id# ... waiting for SysViewsRoster update finished 2026-02-19T08:28:43.591421Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:43.596591Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:43.601106Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:43.601457Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:43.601518Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:43.838445Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:43.838569Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:43.863981Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489720767950 != 1771489720767954 2026-02-19T08:28:43.867128Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:43.910615Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:43.990590Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:44.252381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:44.265519Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:44.367580Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:44.388563Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [2:883:2764], Recipient [2:892:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:44.389540Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [2:883:2764], Recipient [2:892:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:44.389843Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:892:2770] 2026-02-19T08:28:44.390079Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:44.436133Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [2:883:2764], Recipient [2:892:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:44.436907Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:44.437018Z node 2 :TX_DATASHARD DEBUG: 
datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:44.438550Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:44.438632Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:44.438692Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:44.438993Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:44.439121Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:44.439209Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting star ... # 4001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-02-19T08:28:45.764252Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:45.764268Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2026-02-19T08:28:46.442861Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:1277:3047], Recipient [2:892:2770]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2026-02-19T08:28:46.443126Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:28:46.443199Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-02-19T08:28:46.443285Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:28:46.443321Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:28:46.443362Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:46.443398Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:46.443446Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-02-19T08:28:46.443483Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:28:46.443507Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:46.443530Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:28:46.443552Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 
2026-02-19T08:28:46.443683Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-02-19T08:28:46.443923Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2729: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:28:46.443983Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-02-19T08:28:46.444028Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[2:1277:3047], 0} after executionsCount# 1 2026-02-19T08:28:46.444075Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[2:1277:3047], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:28:46.444153Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[2:1277:3047], 0} finished in read 2026-02-19T08:28:46.444233Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:28:46.444260Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:28:46.444286Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:46.444314Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:46.444352Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-02-19T08:28:46.444373Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:46.444401Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-02-19T08:28:46.444437Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:46.444522Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:28:46.444707Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:1278:3048], Recipient [2:976:2829]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2026-02-19T08:28:46.444792Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 
2026-02-19T08:28:46.444837Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-02-19T08:28:46.444888Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:28:46.444912Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-02-19T08:28:46.444935Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:46.444960Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-02-19T08:28:46.444994Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-02-19T08:28:46.445023Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:28:46.445046Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:46.445067Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-02-19T08:28:46.445089Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-02-19T08:28:46.445171Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-02-19T08:28:46.445358Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2729: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 39] 2026-02-19T08:28:46.445410Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-02-19T08:28:46.445446Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037889 Complete read# {[2:1278:3048], 0} after executionsCount# 1 2026-02-19T08:28:46.445478Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037889 read iterator# {[2:1278:3048], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:28:46.445526Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037889 read iterator# {[2:1278:3048], 0} finished in read 2026-02-19T08:28:46.445570Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:28:46.445596Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-02-19T08:28:46.445620Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-02-19T08:28:46.445646Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to 
execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-02-19T08:28:46.445679Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-02-19T08:28:46.445745Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-02-19T08:28:46.445769Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-02-19T08:28:46.445795Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-02-19T08:28:46.445854Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-02-19T08:28:46.446141Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [2:71:2118], Recipient [2:892:2770]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2026-02-19T08:28:46.446253Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [2:71:2118], Recipient [2:976:2829]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2026-02-19T08:28:46.448034Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:1277:3047], Recipient [2:892:2770]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:46.448095Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-02-19T08:28:46.448304Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:1278:3048], Recipient [2:976:2829]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:46.448337Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> DataShardTxOrder::ZigZag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2026-02-19T08:28:47.813454Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:47.815665Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:503} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-02-19T08:28:47.821513Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:47.821582Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:503} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-02-19T08:28:47.827441Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:47.827563Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:312} Unexpected EvGetResult. 
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2026-02-19T08:28:47.827291Z ErrorReason# >> TKeyValueTest::TestRewriteThenLastValueNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2026-02-19T08:28:42.173824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:42.173883Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:42.174809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:42.188078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:42.188429Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:42.188728Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:42.235004Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:42.242330Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:42.242515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:42.244249Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:42.244321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:42.244373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:42.244988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:42.245086Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:42.245164Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:42.314410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:42.351616Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:42.351800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:42.351917Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:42.351970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:42.352004Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:42.352043Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.352204Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, 
Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.352250Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.352535Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:42.352636Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:42.352740Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.352783Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:42.352835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:42.352884Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:42.352930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:42.352974Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:42.353018Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:42.353112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.353154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.353193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:42.359262Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:42.359336Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:42.359429Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:42.359596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:42.359666Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:42.359732Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:42.359773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:42.359812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 
2026-02-19T08:28:42.359848Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:42.359882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.360105Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:42.360130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:42.360166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:42.360201Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.360252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:42.360274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:42.360304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:42.360339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.360357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:42.372055Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:42.372119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.372164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.372208Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:42.372266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:42.372699Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.372742Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.372778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:42.372890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:42.372929Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:42.373064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.373100Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for 
[1000001:1] at 9437184 is Executed 2026-02-19T08:28:42.373137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:42.373167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:42.375835Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:42.375881Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.376035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.376066Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.376106Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.376151Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:42.376186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:42.376226Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:42.376257Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2026-02-19T08:28:47.683529Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:47.683677Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:351:2317], Recipient [2:351:2317]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:47.683700Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:47.683729Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-02-19T08:28:47.683751Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:47.683769Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-02-19T08:28:47.683791Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2026-02-19T08:28:47.683808Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2026-02-19T08:28:47.683825Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.683842Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2026-02-19T08:28:47.683870Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2026-02-19T08:28:47.683887Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2026-02-19T08:28:47.684355Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2026-02-19T08:28:47.684383Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.684403Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2026-02-19T08:28:47.684420Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2026-02-19T08:28:47.684437Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2026-02-19T08:28:47.684459Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.684471Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2026-02-19T08:28:47.684483Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:47.684496Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2026-02-19T08:28:47.684537Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2026-02-19T08:28:47.684566Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437185 2026-02-19T08:28:47.684595Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2026-02-19T08:28:47.684624Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.684637Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:47.684651Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2026-02-19T08:28:47.684664Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2026-02-19T08:28:47.684691Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.684704Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2026-02-19T08:28:47.684719Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2026-02-19T08:28:47.684735Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2026-02-19T08:28:47.684749Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.684763Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2026-02-19T08:28:47.684776Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2026-02-19T08:28:47.684800Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2026-02-19T08:28:47.684819Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.684832Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2026-02-19T08:28:47.684844Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2026-02-19T08:28:47.684857Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2026-02-19T08:28:47.684870Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.684881Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2026-02-19T08:28:47.684892Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BlockFailPoint 2026-02-19T08:28:47.684905Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2026-02-19T08:28:47.684927Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.684943Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BlockFailPoint 2026-02-19T08:28:47.684956Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2026-02-19T08:28:47.684970Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2026-02-19T08:28:47.685205Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2026-02-19T08:28:47.685239Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:28:47.685271Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.685294Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2026-02-19T08:28:47.685314Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2026-02-19T08:28:47.685329Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2026-02-19T08:28:47.685445Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is DelayComplete 2026-02-19T08:28:47.685461Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2026-02-19T08:28:47.685479Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2026-02-19T08:28:47.685495Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2026-02-19T08:28:47.685554Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:47.685572Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2026-02-19T08:28:47.685587Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437185 has finished 2026-02-19T08:28:47.685604Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:47.685621Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-02-19T08:28:47.685643Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-02-19T08:28:47.685660Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-02-19T08:28:47.698594Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-02-19T08:28:47.698649Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-02-19T08:28:47.698698Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-02-19T08:28:47.698729Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-02-19T08:28:47.698792Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:47.698833Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-02-19T08:28:47.699221Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-02-19T08:28:47.699251Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-02-19T08:28:47.699277Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:47.699296Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-02-19T08:28:47.699332Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:47.699356Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:42.368678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:42.479036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:42.491585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:42.491638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:42.491931Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:42.706610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:42.706716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:42.771625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489720097557 != 1771489720097561 2026-02-19T08:28:42.779010Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:42.820671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:42.914900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:43.184440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:43.185952Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-02-19T08:28:43.185994Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:404:2403] Proxy marker# C1 2026-02-19T08:28:43.196924Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:43.292052Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-02-19T08:28:43.292136Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-02-19T08:28:43.292304Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-02-19T08:28:43.292605Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-02-19T08:28:43.292663Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:404:2403] Proxy 2026-02-19T08:28:43.293841Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-02-19T08:28:43.293921Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-02-19T08:28:43.293966Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-02-19T08:28:43.294030Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 
[2:10] persistent tx 1 acknowledged 2026-02-19T08:28:43.298938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:43.329460Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:882:2764], Recipient [1:890:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:43.330168Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:882:2764], Recipient [1:890:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:43.330357Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:890:2770] 2026-02-19T08:28:43.330515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:43.338607Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:882:2764], Recipient [1:890:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:43.367032Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:43.367179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:43.368731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:43.368803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:43.368853Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:43.369263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:43.369397Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:43.369476Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:906:2770] in generation 1 2026-02-19T08:28:43.369903Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:43.398153Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:43.398289Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:43.398379Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:908:2780] 2026-02-19T08:28:43.398404Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:43.398427Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:43.398455Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:43.398595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:890:2770], Recipient [1:890:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:43.398633Z 
node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:43.398825Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:43.398898Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:43.398952Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:43.399029Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:43.399060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:43.399082Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:43.399107Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:43.399130Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:43.399159Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:43.399212Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:893:2771], Recipient [1:890:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:43.399239Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:43.399278Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:889:2769], serverId# [1:893:2771], sessionId# [0:0:0] 2026-02-19T08:28:43.399350Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:893:2771] 2026-02-19T08:28:43.399380Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:43.399446Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:43.399626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:43.399667Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:43.399777Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:43.399810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:43.399839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:43.399860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:43.399884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to 
execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:43.400090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplet ... shard_impl.h:3216: StateWork, received event# 269553215, Sender [1:1598:3268], Recipient [1:1541:3233]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-02-19T08:28:47.564690Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:28:47.564775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2026-02-19T08:28:47.564876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:28:47.564950Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:28:47.564992Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:47.565027Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:47.565075Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-02-19T08:28:47.565112Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:28:47.565138Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:47.565160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:28:47.565184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:28:47.565298Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-02-19T08:28:47.565567Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2729: 72075186224037888 Acquired lock# 281474976715683, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:28:47.565626Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4500/18446744073709551615 2026-02-19T08:28:47.565672Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[1:1598:3268], 0} after executionsCount# 1 2026-02-19T08:28:47.565768Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# 
{[1:1598:3268], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:28:47.565850Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[1:1598:3268], 0} finished in read 2026-02-19T08:28:47.565911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:28:47.565938Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:28:47.565970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:47.565998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:47.566036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:28:47.566057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:47.566082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-02-19T08:28:47.566123Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:47.566214Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:28:47.567507Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [1:1598:3268], Recipient [1:1541:3233]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:47.567580Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2026-02-19T08:28:47.713113Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2026-02-19T08:28:47.713204Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:293: Coordinator# 72057594046316545 scheduling step 5500 in 0.499900s at 5.450000s 2026-02-19T08:28:47.715656Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [1:1620:3285], Recipient [1:1541:3233]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-02-19T08:28:47.715793Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:28:47.715851Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-02-19T08:28:47.715927Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:28:47.715964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:28:47.716003Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:47.716041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:47.716076Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-02-19T08:28:47.716107Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:28:47.716126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:47.716143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:28:47.716158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:28:47.716244Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-02-19T08:28:47.716504Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2729: 72075186224037888 Acquired lock# 281474976715686, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:28:47.716579Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-02-19T08:28:47.716618Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[1:1620:3285], 0} after executionsCount# 1 2026-02-19T08:28:47.716662Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[1:1620:3285], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:28:47.716721Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[1:1620:3285], 0} finished in read 2026-02-19T08:28:47.716787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:28:47.716811Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:28:47.716831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:47.716852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:47.716882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 
2026-02-19T08:28:47.716897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:47.716921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-02-19T08:28:47.716951Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:47.717034Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:28:47.717235Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [1:71:2118], Recipient [1:1541:3233]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715686 LockNode: 1 Status: STATUS_SUBSCRIBED 2026-02-19T08:28:47.718242Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [1:1620:3285], Recipient [1:1541:3233]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:47.718287Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:38.146550Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:38.231927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:38.244605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:38.244663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:38.244977Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:38.472476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:38.472613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:38.528686Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489715394650 != 1771489715394654 2026-02-19T08:28:38.538217Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:38.579265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:38.675681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:38.961809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:38.975160Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:39.078296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:39.112098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:39.113111Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:39.113361Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:28:39.113574Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:39.123140Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:39.162689Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:39.162834Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:39.164471Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:39.164548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:39.164634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:39.165025Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:39.165162Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:39.165233Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:28:39.175903Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:39.209387Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:39.209553Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:39.209651Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:28:39.209686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:39.209748Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:39.209783Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:39.209992Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:39.210045Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:39.210304Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:39.210389Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:39.210522Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:39.210562Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:39.210597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:39.210635Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:39.210669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:39.210699Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:39.210741Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:39.210828Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:39.210880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:39.210925Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:28:39.211009Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:28:39.211057Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:39.211189Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:39.211428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:39.211478Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:39.211586Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:39.211632Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:39.211705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:39.211743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:39.211776Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:39.212059Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:39.212093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:28:39.212140Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:39.212175Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:39.212216Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:28:39.212249Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:39.212277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:28:39.212350Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:28:39.212383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:39.214047Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:28:39.214096Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:28:39.225500Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... CreateTimeMs: 1771489727545 UpdateTimeMs: 1771489727553 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:47.554550Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1277:3063] 2026-02-19T08:28:47.554596Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1280:3066], CA [2:1281:3067], CA [2:1278:3064], CA [2:1282:3068], CA [2:1279:3065], trace_id# 2026-02-19T08:28:47.554651Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1280:3066], CA [2:1281:3067], CA [2:1278:3064], CA [2:1282:3068], CA [2:1279:3065], trace_id# 2026-02-19T08:28:47.554809Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1278:3064] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 390 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 231 FinishTimeMs: 1771489727553 InputRows: 1 InputBytes: 6 OutputRows: 1 OutputBytes: 6 ComputeCpuTimeUs: 159 BuildCpuTimeUs: 72 WaitOutputTimeUs: 227 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489727552 CreateTimeMs: 1771489727545 UpdateTimeMs: 1771489727553 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:47.554860Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1278:3064] 2026-02-19T08:28:47.554918Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1280:3066], CA [2:1281:3067], CA [2:1282:3068], CA [2:1279:3065], trace_id# 2026-02-19T08:28:47.554968Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1272:3044] TxId: 281474976715667. 
Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1280:3066], CA [2:1281:3067], CA [2:1282:3068], CA [2:1279:3065], trace_id# 2026-02-19T08:28:47.555150Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1279:3065] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 250 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 130 FinishTimeMs: 1771489727553 InputRows: 1 InputBytes: 6 OutputRows: 1 OutputBytes: 6 ComputeCpuTimeUs: 91 BuildCpuTimeUs: 39 WaitOutputTimeUs: 303 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489727552 CreateTimeMs: 1771489727546 UpdateTimeMs: 1771489727553 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:47.555200Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1279:3065] 2026-02-19T08:28:47.555256Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1280:3066], CA [2:1281:3067], CA [2:1282:3068], trace_id# 2026-02-19T08:28:47.555320Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1280:3066], CA [2:1281:3067], CA [2:1282:3068], trace_id# 2026-02-19T08:28:47.555454Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1280:3066] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 431 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 294 FinishTimeMs: 1771489727553 InputRows: 2 InputBytes: 12 OutputRows: 2 OutputBytes: 8 ComputeCpuTimeUs: 225 BuildCpuTimeUs: 69 WaitOutputTimeUs: 339 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489727552 CreateTimeMs: 1771489727546 UpdateTimeMs: 1771489727553 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:47.555492Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. 
Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1280:3066] 2026-02-19T08:28:47.555528Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1281:3067], CA [2:1282:3068], trace_id# 2026-02-19T08:28:47.555585Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1281:3067], CA [2:1282:3068], trace_id# 2026-02-19T08:28:47.555690Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1281:3067] TaskId# 6 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 253 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 134 FinishTimeMs: 1771489727553 InputRows: 2 InputBytes: 8 OutputRows: 2 OutputBytes: 8 ComputeCpuTimeUs: 72 BuildCpuTimeUs: 62 WaitOutputTimeUs: 352 HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489727546 UpdateTimeMs: 1771489727553 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:47.555723Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1281:3067] 2026-02-19T08:28:47.555757Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1282:3068], trace_id# 2026-02-19T08:28:47.555793Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1282:3068], trace_id# 2026-02-19T08:28:47.555901Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1282:3068] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 275 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 151 FinishTimeMs: 1771489727553 InputRows: 2 InputBytes: 8 OutputRows: 2 OutputBytes: 8 ResultRows: 2 ResultBytes: 8 ComputeCpuTimeUs: 102 BuildCpuTimeUs: 49 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489727552 CreateTimeMs: 1771489727546 UpdateTimeMs: 1771489727553 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:47.555929Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1282:3068] 2026-02-19T08:28:47.556070Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:28:47.556147Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1272:3044] TxId: 281474976715667. Ctx: { TraceId: 01khtg84q4cdhyd0p2c3sq8za9, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE4NDhiMWQtMWQwNThjZjUtOWZkOTE2NjItNjMxODg5ZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002835s ReadRows: 2 ReadBytes: 16 RequestUnits# 2 ForceFlag# true trace_id# { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2026-02-19T08:28:41.856590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:41.856653Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:41.857521Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:41.870976Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:41.871346Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:41.871625Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:41.921551Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:41.927895Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:41.928032Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:41.929295Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:41.929344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 
2026-02-19T08:28:41.929419Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:41.929734Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:41.929804Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:41.929871Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:41.998762Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:42.034617Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:42.034814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:42.034924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:42.034998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:42.035036Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:42.035070Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.035231Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.035283Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.035579Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:42.035668Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:42.035777Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.035818Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:42.035873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:42.035920Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:42.035978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:42.036013Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:42.036057Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:42.036166Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.036215Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.036262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# 
[1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:42.042849Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:42.042923Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:42.043016Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:42.043180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:42.043250Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:42.043320Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:42.043384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:42.043432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:42.043474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:42.043507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.043795Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:42.043833Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:42.043872Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:42.043910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.043962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:42.043995Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:42.044030Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:42.044069Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.044114Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:42.056254Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:42.056321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.056383Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.056427Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:42.056541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:42.057049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.057105Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.057150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:42.057291Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:42.057341Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:42.057487Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.057535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:42.057573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:42.057627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:42.061562Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:42.061629Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.061860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.061902Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.061961Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.062001Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:42.062049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:42.062103Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:42.062142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
6-02-19T08:28:48.152172Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:48.152356Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:240:2231], Recipient [2:240:2231]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:48.152390Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:48.152428Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:48.152456Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:48.152481Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:48.152512Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2026-02-19T08:28:48.152553Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2026-02-19T08:28:48.152585Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.152607Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2026-02-19T08:28:48.152629Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2026-02-19T08:28:48.152654Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2026-02-19T08:28:48.153316Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2026-02-19T08:28:48.153359Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.153406Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2026-02-19T08:28:48.153434Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2026-02-19T08:28:48.153461Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2026-02-19T08:28:48.153500Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.153522Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2026-02-19T08:28:48.153542Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:48.153563Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2026-02-19T08:28:48.153617Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437184 2026-02-19T08:28:48.153644Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437184 2026-02-19T08:28:48.153668Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437184 2026-02-19T08:28:48.153699Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.153738Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:48.153763Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2026-02-19T08:28:48.153794Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2026-02-19T08:28:48.153841Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.153863Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2026-02-19T08:28:48.153890Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2026-02-19T08:28:48.153912Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2026-02-19T08:28:48.153936Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.153957Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2026-02-19T08:28:48.153996Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2026-02-19T08:28:48.154029Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2026-02-19T08:28:48.154056Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.154079Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2026-02-19T08:28:48.154100Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2026-02-19T08:28:48.154123Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2026-02-19T08:28:48.154144Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.154164Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2026-02-19T08:28:48.154184Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BlockFailPoint 2026-02-19T08:28:48.154206Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BlockFailPoint 2026-02-19T08:28:48.154228Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.154250Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BlockFailPoint 2026-02-19T08:28:48.154273Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2026-02-19T08:28:48.154327Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2026-02-19T08:28:48.154687Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2026-02-19T08:28:48.154738Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:28:48.154784Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.154810Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2026-02-19T08:28:48.154834Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2026-02-19T08:28:48.154858Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2026-02-19T08:28:48.155018Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is DelayComplete 2026-02-19T08:28:48.155043Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2026-02-19T08:28:48.155074Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2026-02-19T08:28:48.155098Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2026-02-19T08:28:48.155128Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-02-19T08:28:48.155149Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2026-02-19T08:28:48.155172Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437184 has finished 2026-02-19T08:28:48.155215Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:48.155240Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:48.155270Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:48.155295Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:48.168458Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-02-19T08:28:48.168575Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-02-19T08:28:48.168649Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:48.168697Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-02-19T08:28:48.168762Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:28:48.168816Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:48.169872Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-02-19T08:28:48.169914Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-02-19T08:28:48.169954Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-02-19T08:28:48.169983Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-02-19T08:28:48.170022Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:48.170070Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestCopyRangeWorks >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] >> TKeyValueTest::TestConcatWorks >> TKeyValueTest::TestWriteReadPatchRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:38.787620Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:38.913768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:38.932132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:38.932211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:38.932681Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:39.224471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:39.224625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:39.306743Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489716204814 != 1771489716204818 2026-02-19T08:28:39.315051Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:39.360293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:39.448620Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:39.742739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:39.755884Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:39.859825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:39.893429Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:39.894481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:39.894764Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:28:39.895051Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:39.904866Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:39.934928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:39.935073Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:39.936670Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:39.936736Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:39.936790Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:39.937201Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:39.937328Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:39.937403Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:28:39.948166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:39.989309Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:39.989515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:39.989636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:28:39.989676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:39.989740Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:39.989785Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:39.990029Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:39.990090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:39.990395Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:39.990495Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:39.990663Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:39.990714Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:39.990751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:39.990790Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:39.990837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:39.990872Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:39.990923Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:39.991017Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:39.991068Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:39.991117Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:28:39.991202Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:28:39.991254Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:39.991367Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:39.991620Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:39.991682Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:39.991793Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:39.991839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:39.991894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:39.991932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:39.991970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:39.992277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:39.992317Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:28:39.992365Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:39.992405Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:39.992454Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:28:39.992492Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:39.992524Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:28:39.992621Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:28:39.992656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:39.994430Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:28:39.994483Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:28:40.005235Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... CurrentExecutionId : . DatabaseId : /Root. Database : . }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2026-02-19T08:28:48.496172Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.498020Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-02-19T08:28:48.499056Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1096:2913];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1093:2910];info={
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.499171Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1096:2913];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.499564Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1088:2883] TxId: 281474976715662. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1093:2910] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 184742 Tasks { TaskId: 1 CpuTimeUs: 181767 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 28 BuildCpuTimeUs: 181739 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489727861 CurrentWaitInputTimeUs: 130396 UpdateTimeMs: 1771489728496 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:48.499643Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1093:2910] 2026-02-19T08:28:48.500480Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1221} ActorId: [2:1088:2883] TxId: 281474976715662. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-02-19T08:28:48.500589Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1214} ActorId: [2:1088:2883] TxId: 281474976715662. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1094:2911] TaskId# 2 trace_id# 2026-02-19T08:28:48.500671Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1214} ActorId: [2:1088:2883] TxId: 281474976715662. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1095:2912] TaskId# 3 trace_id# 2026-02-19T08:28:48.501274Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1088:2883] TxId: 281474976715662. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:28:48.501406Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:04.000000Z, after 0.550000s 2026-02-19T08:28:48.501468Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1094:2911], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01khtg84t5939snw8arrkcsnbm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646735 2026-02-19T08:28:48.501551Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [2:1094:2911], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01khtg84t5939snw8arrkcsnbm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Handle abort execution event from: [2:1088:2883], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-02-19T08:28:48.501860Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.503469Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-02-19T08:28:48.503614Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1095:2912], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01khtg84t5939snw8arrkcsnbm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-02-19T08:28:48.503681Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [2:1095:2912], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01khtg84t5939snw8arrkcsnbm. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1088:2883], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-02-19T08:28:48.503800Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.506743Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-02-19T08:28:48.507169Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, ActorId: [2:1061:2883], ActorState: ExecuteState, LegacyTraceId: 01khtg84t5939snw8arrkcsnbm, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-02-19T08:28:48.507641Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:69:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:28:48.507696Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:69:2116] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-02-19T08:28:48.508117Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-02-19T08:28:48.508192Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:69:2116] Handle TEvProposeTransaction 2026-02-19T08:28:48.508237Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:69:2116] TxId# 0 ProcessProposeTransaction 2026-02-19T08:28:48.510273Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:69:2116] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1129:2944] SnapshotReq marker# P0 2026-02-19T08:28:48.510700Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1131:2944] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-02-19T08:28:48.510910Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2026-02-19T08:28:48.511034Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:28:48.511109Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:1128:2883] TxId: 281474976715664. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:28:48.511229Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1128:2883] TxId: 281474976715664. Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:28:48.511296Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1128:2883] TxId: 281474976715664. 
Ctx: { TraceId: 01khtg84t5939snw8arrkcsnbm, Database: , SessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:28:48.511558Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 3, sender: [2:836:2730], selfId: [2:67:2114], source: [2:1061:2883] 2026-02-19T08:28:48.511611Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1131:2944] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-02-19T08:28:48.511766Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1129:2944] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2026-02-19T08:28:48.512016Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1129:2944], Recipient [2:893:2771]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-02-19T08:28:48.512845Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=2&id=MTRiYzBjYWItNmU1NDEwMjItOWVhOWM5YS05ZDA1NjA3Mg==, workerId: [2:1061:2883], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } EffectivePoolId: "default" } YdbStatus: ABORTED ConsumedRu: 256 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:40.733693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:40.845647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:40.863180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:40.863254Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:40.863687Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:41.136749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:41.136881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:41.218748Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489718134230 != 1771489718134234 2026-02-19T08:28:41.235684Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:41.279464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:41.361556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:41.666638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:41.680406Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:41.784479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:41.817227Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:41.818239Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:41.818525Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:28:41.818730Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:41.827369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:41.852461Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:41.852608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:41.854060Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:41.854123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:41.854171Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:41.854523Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:41.854639Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:41.854700Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:28:41.866676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:41.894275Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:41.894477Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:41.894607Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:28:41.894650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:41.894694Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:41.894731Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:41.894951Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:41.895000Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:41.895283Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:41.895391Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:41.895555Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:41.895615Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:41.895660Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:41.895699Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:41.895733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:41.895766Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:41.895811Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:41.895915Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:41.895966Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:41.896015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:28:41.896140Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:28:41.896202Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:41.896321Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:41.896643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:41.896709Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:41.896817Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:41.896861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:41.896899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:41.896938Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:41.896970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:41.897265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:41.897306Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:28:41.897363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:41.897458Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:41.897505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:28:41.897539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:41.897574Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:28:41.897612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:28:41.897638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:41.899444Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:28:41.899493Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:28:41.910253Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... ctorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2026-02-19T08:28:48.595435Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.596848Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-02-19T08:28:48.597003Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1099:2912];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1096:2909];info={
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.597053Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1099:2912];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.597261Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1091:2882] TxId: 281474976715662. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1096:2909] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 186749 Tasks { TaskId: 1 CpuTimeUs: 184767 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 18 BuildCpuTimeUs: 184749 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489727905 CurrentWaitInputTimeUs: 115562 UpdateTimeMs: 1771489728595 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:48.597310Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1096:2909] 2026-02-19T08:28:48.597379Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1221} ActorId: [2:1091:2882] TxId: 281474976715662. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-02-19T08:28:48.597434Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1214} ActorId: [2:1091:2882] TxId: 281474976715662. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1097:2910] TaskId# 2 trace_id# 2026-02-19T08:28:48.597478Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1214} ActorId: [2:1091:2882] TxId: 281474976715662. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1098:2911] TaskId# 3 trace_id# 2026-02-19T08:28:48.597547Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1091:2882] TxId: 281474976715662. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:28:48.597779Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, ActorId: [2:1064:2882], ActorState: ExecuteState, LegacyTraceId: 01khtg84wm1ac8ar721daymevf, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-02-19T08:28:48.598083Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:69:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:28:48.598115Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:69:2116] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-02-19T08:28:48.598157Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1097:2910], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01khtg84wm1ac8ar721daymevf. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-02-19T08:28:48.598216Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [2:1097:2910], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01khtg84wm1ac8ar721daymevf. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1091:2882], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-02-19T08:28:48.598320Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.599246Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-02-19T08:28:48.599324Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1098:2911], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01khtg84wm1ac8ar721daymevf. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646735 2026-02-19T08:28:48.599362Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [2:1098:2911], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01khtg84wm1ac8ar721daymevf. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Handle abort execution event from: [2:1091:2882], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-02-19T08:28:48.599421Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-02-19T08:28:48.601112Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-02-19T08:28:48.601396Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-02-19T08:28:48.601448Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:69:2116] Handle TEvProposeTransaction 2026-02-19T08:28:48.601477Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:69:2116] TxId# 0 ProcessProposeTransaction 2026-02-19T08:28:48.601548Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:69:2116] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1132:2943] SnapshotReq marker# P0 2026-02-19T08:28:48.601818Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1134:2943] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-02-19T08:28:48.601949Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1134:2943] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-02-19T08:28:48.602042Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1132:2943] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2026-02-19T08:28:48.602137Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2026-02-19T08:28:48.602221Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553190, Sender [2:1132:2943], Recipient [2:893:2771]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-02-19T08:28:48.602397Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:28:48.602448Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:1131:2882] TxId: 281474976715664. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:28:48.602574Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1131:2882] TxId: 281474976715664. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:28:48.602617Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1131:2882] TxId: 281474976715664. Ctx: { TraceId: 01khtg84wm1ac8ar721daymevf, Database: , SessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:28:48.602774Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 3, sender: [2:836:2730], selfId: [2:67:2114], source: [2:1064:2882] 2026-02-19T08:28:48.603310Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=2&id=NzUxMzNhYzYtZWUzODc2MWYtYTczMGFlYzAtY2VkN2QyNGI=, workerId: [2:1064:2882], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } EffectivePoolId: "default" } YdbStatus: ABORTED ConsumedRu: 253 >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> DataShardOutOfOrder::LocksBrokenStats [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestWrite200KDeleteThenResponseError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:41.685484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:41.781850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:41.798870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:41.798947Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:41.799405Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:42.024963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:42.025085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:42.094731Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489719409467 != 1771489719409471 2026-02-19T08:28:42.102945Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:42.147559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:42.234326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:42.508640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:42.521779Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:42.624777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:42.656856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:42.657720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:42.657972Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-02-19T08:28:42.658189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:42.665115Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:42.665673Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:42.693776Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:42.694013Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-02-19T08:28:42.694199Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:42.702032Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:42.702670Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:42.702825Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:42.704425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:42.704488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:42.704591Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:42.704920Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:42.705074Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:42.705137Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2773] in generation 1 2026-02-19T08:28:42.705406Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:42.705481Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:42.706762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:28:42.706825Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:28:42.706874Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:28:42.707119Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:42.707222Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:42.707325Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:930:2775] in generation 1 2026-02-19T08:28:42.717976Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:42.744904Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:42.745040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:42.745114Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:933:2794] 2026-02-19T08:28:42.745141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:42.745164Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:42.745186Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:42.745367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:897:2773], Recipient [1:897:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.745400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing 
event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.745473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:42.745499Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:28:42.745531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:42.745567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:934:2795] 2026-02-19T08:28:42.745585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:28:42.745598Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:28:42.745614Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:28:42.745867Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:900:2775], Recipient [1:900:2775]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.745902Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.746015Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:42.746069Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:42.746182Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:42.746242Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:42.746269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:42.746293Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:42.746314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:42.746335Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:42.746363Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:42.746404Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:28:42.746443Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:28:42.746522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:903:2776], Recipient [1:897:2773]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.746550Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.746589Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# 
[1:891:2769], serverId# [1:903:2776], sessionId# [0:0:0] 2026-02-19T08:28:42.746625Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:28:42.746642Z node 1 :TX_DAT ... iteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-02-19T08:28:43.681024Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=1 2026-02-19T08:28:43.681088Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:110: add locks to result: 0 2026-02-19T08:28:43.681159Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:43.681197Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-02-19T08:28:43.681261Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-02-19T08:28:43.681288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:28:43.681359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:43.681388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-02-19T08:28:43.681419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:43.681447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:43.681480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:28:43.681501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:43.681552Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-02-19T08:28:43.692291Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:28:43.692352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:28:43.692397Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-02-19T08:28:43.692476Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:43.692984Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [1:1169:2950] TxId: 281474976715662. Ctx: { TraceId: 01khtg8170ejafjyyb7aprrh6e, Database: , SessionId: ydb://session/3?node_id=1&id=OWRiODg0YS0zZjhjNjcwMC1lYWZjOTdhOC05Mjg4MWFjYg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:28:43.693056Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [1:1169:2950] TxId: 281474976715662. 
Ctx: { TraceId: 01khtg8170ejafjyyb7aprrh6e, Database: , SessionId: ydb://session/3?node_id=1&id=OWRiODg0YS0zZjhjNjcwMC1lYWZjOTdhOC05Mjg4MWFjYg==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# ... waiting for SysViewsRoster update finished 2026-02-19T08:28:46.930730Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:46.935936Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:46.940300Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:46.940641Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:46.940698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:47.187562Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:47.187687Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:47.211091Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489724175425 != 1771489724175429 2026-02-19T08:28:47.214086Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:47.257694Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:47.338204Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:47.602060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:47.615950Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:47.719540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:47.956621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:48.173171Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1032:2872], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.173284Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1043:2877], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.173699Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.174478Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1048:2882], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.174903Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.179120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:48.365278Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1046:2880], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:28:48.400989Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1105:2920] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for readsets 2026-02-19T08:28:49.081253Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1318: SelfId: [2:1187:2959], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1165:2959]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-02-19T08:28:49.090901Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [2:1179:2959], SessionActorId: [2:1165:2959], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:1165:2959]. 2026-02-19T08:28:49.091181Z node 2 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1114: TxId: 281474976715664, task: 1, CA Id [2:1213:2997]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-02-19T08:28:49.091837Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=2&id=MjQzOGQ4NmQtMWQ5N2M4NmItOWI5NDY4Y2UtNTM4ZDc5NmU=, ActorId: [2:1165:2959], ActorState: ExecuteState, LegacyTraceId: 01khtg861a5gwnss4gkzqrfg1k, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:1180:2959] from: [2:1179:2959] trace_id# 2026-02-19T08:28:49.092039Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:1180:2959] TxId: 281474976715663. Ctx: { TraceId: 01khtg861a5gwnss4gkzqrfg1k, Database: /Root, SessionId: ydb://session/3?node_id=2&id=MjQzOGQ4NmQtMWQ5N2M4NmItOWI5NDY4Y2UtNTM4ZDc5NmU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } trace_id# 2026-02-19T08:28:49.092628Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=2&id=MjQzOGQ4NmQtMWQ5N2M4NmItOWI5NDY4Y2UtNTM4ZDc5NmU=, ActorId: [2:1165:2959], ActorState: ExecuteState, LegacyTraceId: 01khtg861a5gwnss4gkzqrfg1k, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888." issue_code: 2026 severity: 1 } trace_id# { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] >> DataShardTxOrder::DelayData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:40.076729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:40.179168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:40.199548Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:40.199624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:40.200075Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:40.449789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:40.449912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:40.523509Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489717775705 != 1771489717775709 2026-02-19T08:28:40.532258Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:40.578308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:40.666852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:40.952380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:40.965474Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:41.068501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:41.325997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:41.537872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:41.537998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1041:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:41.538098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:41.539035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1046:2880], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:41.539254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:41.543674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:41.683415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1044:2878], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:28:41.758593Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1102:2917] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-02-19T08:28:43.421266Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=M2FlZTQ2NS02NTBmOGYyYS1mN2JkYjkzMy02ZTJmMmU0ZQ==, ActorId: [1:1171:2965], ActorState: ExecuteState, LegacyTraceId: 01khtg80z45yj793yd4ztbs4tm, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 } trace_id# ... waiting for SysViewsRoster update finished 2026-02-19T08:28:46.563694Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:46.568792Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:46.573277Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:46.573632Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:46.573692Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:46.817817Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:46.817932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:46.843073Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489723948855 != 1771489723948859 2026-02-19T08:28:46.846382Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:46.890856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:46.969756Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:47.242464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:47.254824Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:47.356809Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:47.590160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:47.803073Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:47.803176Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1041:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:47.803258Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:47.804282Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1046:2880], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:47.804493Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:47.808745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:47.993572Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1044:2878], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:28:48.028788Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1103:2918] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-02-19T08:28:49.456117Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=2&id=Njg2ZjgwYzYtYzMwMDRlZDEtZTJhMDZmMGUtZTA3ZDJhNjY=, ActorId: [2:1166:2968], ActorState: ExecuteState, LegacyTraceId: 01khtg86v5806spqqmeg9ghyet, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:42.035684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:42.125309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:42.141760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:42.141826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:42.142136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:42.370648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:42.370768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:42.427922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489719931093 != 1771489719931097 2026-02-19T08:28:42.434737Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:42.476098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:42.567548Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:42.827501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:42.839957Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:42.940760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:42.967991Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:42.968689Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:42.968888Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:28:42.969046Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:42.977407Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:43.004163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:43.004264Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:43.005472Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:43.005522Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:43.005594Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:43.005865Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:43.005966Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:43.006016Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:28:43.016533Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:43.039009Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:43.039135Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:43.039210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:28:43.039232Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:43.039275Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:43.039301Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:43.039456Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:43.039486Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:43.039676Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:43.039734Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:43.039855Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:43.039887Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:43.039920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:43.039949Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:43.039982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:43.040003Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:43.040032Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:43.040097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:43.040129Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:43.040156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:28:43.040215Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:28:43.040247Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:43.040322Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:43.040519Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:43.040570Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:43.040653Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:43.040684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:43.040707Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:43.040729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:43.040752Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:43.040969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:43.040993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:28:43.041043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:43.041066Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:43.041093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:28:43.041145Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:43.041173Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:28:43.041195Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:28:43.041212Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:43.042402Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:28:43.042437Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:28:43.053013Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 3000} 2026-02-19T08:28:49.181094Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3254: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-02-19T08:28:49.181122Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 3000 2026-02-19T08:28:49.181148Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-02-19T08:28:49.181228Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877764, Sender [2:1371:3115], Recipient [2:1228:3012]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:28:49.181256Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3253: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:28:49.181286Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1369:3113], serverId# [2:1371:3115], sessionId# [0:0:0] 2026-02-19T08:28:49.181654Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1228:3012]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2026-02-19T08:28:49.181692Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3255: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-02-19T08:28:49.181737Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 3000 2026-02-19T08:28:49.181771Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2830: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-02-19T08:28:49.181811Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2864: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-02-19T08:28:49.192740Z node 2 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037893 ack snapshot OpId 281474976715665 2026-02-19T08:28:49.192876Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037893 2026-02-19T08:28:49.192973Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:28:49.193044Z node 2 :TX_DATASHARD INFO: datashard.cpp:1315: Change sender killed: at tablet: 72075186224037893 2026-02-19T08:28:49.193097Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037893, actorId: [2:1379:3123] 2026-02-19T08:28:49.193123Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037893 2026-02-19T08:28:49.193157Z node 2 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037893 2026-02-19T08:28:49.193185Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-02-19T08:28:49.193329Z node 2 :TX_DATASHARD DEBUG: 
datashard_split_src.cpp:492: 72075186224037888 ack split to schemeshard 281474976715664 2026-02-19T08:28:49.194076Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553157, Sender [2:1231:3015], Recipient [2:976:2829]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2026-02-19T08:28:49.194143Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:468: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2026-02-19T08:28:49.194474Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:1231:3015], Recipient [2:1231:3015]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:49.194511Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:49.194722Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877763, Sender [2:1370:3114], Recipient [2:976:2829]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1370:3114] ServerId: [2:1372:3116] } 2026-02-19T08:28:49.194759Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3250: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-02-19T08:28:49.195095Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 270270976, Sender [2:26:2073], Recipient [2:1231:3015]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 3000} 2026-02-19T08:28:49.195132Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3254: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-02-19T08:28:49.195162Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 3000 2026-02-19T08:28:49.195191Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-02-19T08:28:49.195651Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-02-19T08:28:49.195690Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:49.195718Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037893 2026-02-19T08:28:49.195744Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037893 has no attached operations 2026-02-19T08:28:49.195770Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037893 2026-02-19T08:28:49.195796Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037893 TxInFly 0 2026-02-19T08:28:49.195829Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-02-19T08:28:49.195936Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877764, Sender [2:1372:3116], Recipient [2:1231:3015]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:28:49.195968Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3253: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:28:49.196001Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3764: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1370:3114], serverId# 
[2:1372:3116], sessionId# [0:0:0] 2026-02-19T08:28:49.197384Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1231:3015]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2026-02-19T08:28:49.197426Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3255: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-02-19T08:28:49.197457Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 3000 2026-02-19T08:28:49.197495Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2830: CheckMediatorStateRestored at 72075186224037893: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-02-19T08:28:49.197541Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2864: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-02-19T08:28:49.198331Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553158, Sender [2:405:2404], Recipient [2:894:2771] 2026-02-19T08:28:49.198398Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:572: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2026-02-19T08:28:49.200528Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:539: 72075186224037888 ack split partitioning changed to schemeshard 281474976715664 2026-02-19T08:28:49.200598Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:28:49.200999Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 268828683, Sender [2:883:2764], Recipient [2:892:2770]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-02-19T08:28:49.211679Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:492: 72075186224037889 ack split to schemeshard 281474976715665 2026-02-19T08:28:49.213451Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553158, Sender [2:405:2404], Recipient [2:978:2830] 2026-02-19T08:28:49.213492Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:572: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2026-02-19T08:28:49.214881Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:539: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2026-02-19T08:28:49.214935Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-02-19T08:28:49.215315Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 268828683, Sender [2:968:2824], Recipient [2:976:2829]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-02-19T08:28:49.633113Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [2:1176:2869], Recipient [2:892:2770]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1176 RawX2: 8589937461 } TxBody: " 
\0008\000`\200\200\200\005j\213\007\010\001\022\314\006\010\001\022\024\n\022\t\230\004\000\000\000\000\000\000\0215\013\000\000\002\000\000\000\032\262\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? L\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2026-02-19T08:28:49.633179Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:49.633274Z node 2 :TX_DATASHARD NOTICE: datashard.cpp:3115: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2026-02-19T08:28:49.633627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2026-02-19T08:28:49.634034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2026-02-19T08:28:40.795265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:40.795315Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:40.796154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:40.809093Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:40.809436Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:40.809696Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:40.853025Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:40.859560Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:40.859741Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:40.861156Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:40.861230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:40.861282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:40.861671Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:40.861766Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:40.861848Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:40.926458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:40.956343Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:40.956534Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:40.956645Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:40.956700Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:40.956737Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:40.956768Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:40.956912Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.956959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.957346Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:40.957452Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:40.957565Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:40.957604Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:40.957655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:40.957690Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:40.957943Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:40.957995Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:40.958045Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:40.958161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.958211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, 
processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.958252Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:40.964676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:40.964738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:40.964813Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:40.964964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:40.965021Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:40.965083Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:40.965129Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:40.965164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:40.965199Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:40.965233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:40.965482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:40.965514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:40.965548Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:40.965579Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:40.965651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:40.965674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:40.965702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:40.965745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:40.965773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:40.977853Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 
2026-02-19T08:28:40.977946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:40.977996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:40.978039Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:40.978113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:40.978596Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.978653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.978695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:40.978822Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:40.978882Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:40.979040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:40.979084Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:40.979118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:40.979151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:40.983335Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:40.983407Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:40.983616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.983658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.983710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:40.983750Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:40.983784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:40.983828Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:40.983863Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [100 ... plan for [1000005:506] at 9437184 has finished 2026-02-19T08:28:51.047975Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:51.047996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:51.048020Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2026-02-19T08:28:51.048333Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:238:2229], Recipient [1:238:2229]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:51.048370Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:51.048425Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:51.048457Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:51.048483Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:282: Return cached ready operation [1000005:507] at 9437184 2026-02-19T08:28:51.048520Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2026-02-19T08:28:51.048540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.048562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2026-02-19T08:28:51.048597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2026-02-19T08:28:51.048614Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2026-02-19T08:28:51.049050Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2026-02-19T08:28:51.049082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.049104Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2026-02-19T08:28:51.049119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2026-02-19T08:28:51.049141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2026-02-19T08:28:51.049162Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.049174Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2026-02-19T08:28:51.049187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:51.049199Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2026-02-19T08:28:51.049229Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:507] is the new logically complete end at 9437184 2026-02-19T08:28:51.049244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:507] is the new logically incomplete end at 9437184 2026-02-19T08:28:51.049270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:507] at 9437184 2026-02-19T08:28:51.049300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.049316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:51.049328Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2026-02-19T08:28:51.049340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2026-02-19T08:28:51.049362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.049374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2026-02-19T08:28:51.049386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2026-02-19T08:28:51.049397Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2026-02-19T08:28:51.049421Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.049446Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit StoreAndSendOutRS 2026-02-19T08:28:51.049460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2026-02-19T08:28:51.049490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2026-02-19T08:28:51.049507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.049519Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2026-02-19T08:28:51.049535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2026-02-19T08:28:51.049547Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2026-02-19T08:28:51.049559Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.049570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2026-02-19T08:28:51.049589Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BlockFailPoint 2026-02-19T08:28:51.049605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BlockFailPoint 2026-02-19T08:28:51.049628Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.049653Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BlockFailPoint 2026-02-19T08:28:51.049672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2026-02-19T08:28:51.049685Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2026-02-19T08:28:51.049908Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2026-02-19T08:28:51.049938Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:28:51.049978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:51.049998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2026-02-19T08:28:51.050034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2026-02-19T08:28:51.050061Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2026-02-19T08:28:51.050144Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is DelayComplete 2026-02-19T08:28:51.050166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2026-02-19T08:28:51.050189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2026-02-19T08:28:51.050205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2026-02-19T08:28:51.050223Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-02-19T08:28:51.050235Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2026-02-19T08:28:51.050274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:507] at 9437184 has finished 2026-02-19T08:28:51.050294Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:51.050322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:51.050342Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:51.050366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:51.063465Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 
2026-02-19T08:28:51.063534Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2026-02-19T08:28:51.063581Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:51.063636Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2026-02-19T08:28:51.063681Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:28:51.063718Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:51.063830Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:51.063847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2026-02-19T08:28:51.063876Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:28:51.063897Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2026-02-19T08:28:45.141606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:45.141658Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:45.142516Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:45.155228Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:45.155591Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:45.155847Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:45.201914Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:45.208067Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:45.208196Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:45.209331Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:45.209386Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:45.209471Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:45.209779Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:45.209842Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:45.209920Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: 
DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:45.265373Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:45.290353Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:45.290492Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:45.290595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:45.290633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:45.290659Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:45.290685Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:45.290800Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:45.290828Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:45.291058Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:45.291127Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:45.291209Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:45.291238Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:45.291272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:45.291312Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:45.291352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:45.291381Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:45.291411Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:45.291479Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:45.291508Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:45.291535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:45.296871Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 
8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:45.296921Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:45.296992Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:45.297145Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:45.297217Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:45.297274Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:45.297303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:45.297330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:45.297353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:45.297376Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:45.297627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:45.297652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:45.297703Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:45.297755Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:45.297800Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:45.297831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:45.297870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:45.297902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:45.297921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:45.309886Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:45.309963Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:45.310024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:45.310064Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:45.310137Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration 
request in state WaitScheme 2026-02-19T08:28:45.310619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:45.310668Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:45.310709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:45.310870Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:45.310905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:45.311057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:45.311111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:45.311168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:45.311203Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:45.315156Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:45.315220Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:45.315428Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:45.315482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:45.315546Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:45.315583Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:45.315614Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:45.315666Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:45.315703Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
[1:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:28:50.861181Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:28:50.861208Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:50.861375Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-02-19T08:28:50.861418Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:50.861452Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-02-19T08:28:50.861554Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:50.861591Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-02-19T08:28:50.861629Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 2 ms 2026-02-19T08:28:50.861672Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:50.861728Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:50.861848Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:50.862076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-02-19T08:28:50.862110Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:50.862142Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-02-19T08:28:50.862205Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [1:238:2229], Recipient [1:459:2400]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-02-19T08:28:50.862233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:28:50.862262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-02-19T08:28:50.862333Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-02-19T08:28:50.862388Z node 1 :TX_DATASHARD TRACE: 
operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-02-19T08:28:50.862444Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-02-19T08:28:50.862565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:50.862602Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:50.862646Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-02-19T08:28:50.862732Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:28:50.862760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:50.862785Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-02-19T08:28:50.862843Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:459:2400], Recipient [1:459:2400]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:50.862894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:50.862942Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-02-19T08:28:50.862972Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:28:50.863011Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-02-19T08:28:50.863046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-02-19T08:28:50.863076Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-02-19T08:28:50.863105Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-02-19T08:28:50.863134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-02-19T08:28:50.863175Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-02-19T08:28:50.863202Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-02-19T08:28:50.863236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-02-19T08:28:50.863264Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-02-19T08:28:50.863288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 
9437186 on unit ExecuteDataTx 2026-02-19T08:28:50.863738Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-02-19T08:28:50.863801Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:28:50.863845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-02-19T08:28:50.863871Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-02-19T08:28:50.863897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-02-19T08:28:50.863922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-02-19T08:28:50.864096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-02-19T08:28:50.864124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-02-19T08:28:50.864150Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-02-19T08:28:50.864177Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-02-19T08:28:50.864227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-02-19T08:28:50.864280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-02-19T08:28:50.864321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-02-19T08:28:50.864353Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:50.864384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-02-19T08:28:50.864419Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-02-19T08:28:50.864447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-02-19T08:28:50.864674Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:28:50.864724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:50.864760Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-02-19T08:28:50.878463Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:50.878528Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-02-19T08:28:50.878589Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 1 ms, propose latency: 3 ms 2026-02-19T08:28:50.878656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:50.878693Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:50.878972Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:50.879013Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:50.879043Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::LocksBrokenStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:43.194129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:43.308936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:43.327765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:43.327847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:43.328386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:43.615789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:43.615908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:43.691878Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489720376541 != 1771489720376545 2026-02-19T08:28:43.701253Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:43.745916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:43.846280Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:44.117955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:44.131053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:44.232402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:44.265309Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:44.266350Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:44.266610Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:28:44.266820Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:44.276156Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:44.314040Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:44.314167Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:44.315975Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:44.316050Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:44.316119Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:44.316568Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:44.316702Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:44.316785Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:28:44.327560Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:44.363522Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:44.363723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:44.363854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:28:44.363906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:44.363966Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:44.364001Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:44.364236Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:44.364287Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:44.364613Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:44.364706Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:44.364883Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:44.364926Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:44.364969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:44.365013Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:44.365067Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:44.365100Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:44.365145Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:44.365227Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:44.365277Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:44.365320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:28:44.365431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:28:44.365495Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:44.365622Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:44.365883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:44.365962Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:44.366067Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:44.366113Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:44.366150Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:44.366184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:44.366218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:44.366601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:44.366644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:28:44.366694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:44.366733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:44.366823Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:28:44.366865Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:44.366902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:28:44.366937Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:28:44.366970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:44.368787Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:28:44.368833Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:28:44.379556Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 893Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-02-19T08:28:50.530920Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2026-02-19T08:28:50.530945Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:50.530969Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:50.531017Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-02-19T08:28:50.531076Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715664] at 72075186224037888 2026-02-19T08:28:50.531126Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-02-19T08:28:50.531154Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:50.531175Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BlockFailPoint 2026-02-19T08:28:50.531199Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BlockFailPoint 2026-02-19T08:28:50.531237Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-02-19T08:28:50.531266Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BlockFailPoint 2026-02-19T08:28:50.531288Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-02-19T08:28:50.531311Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2026-02-19T08:28:50.531364Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:274: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-02-19T08:28:50.531447Z node 2 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: false 2026-02-19T08:28:50.531531Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:520: add locks to result: 0 2026-02-19T08:28:50.531587Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-02-19T08:28:50.531613Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution 
plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-02-19T08:28:50.531650Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:50.531676Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:50.531720Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-02-19T08:28:50.531774Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2026-02-19T08:28:50.531807Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:50.531842Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:50.531876Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:50.531914Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-02-19T08:28:50.531940Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:50.531964Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2026-02-19T08:28:50.532023Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:28:50.532069Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:50.532114Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:50.533448Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [2:71:2118], Recipient [2:892:2770]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2026-02-19T08:28:50.533837Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [2:1124:2934], Recipient [2:892:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:50.533882Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:50.533929Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:1123:2933], serverId# [2:1124:2934], sessionId# [0:0:0] 2026-02-19T08:28:50.534033Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553169, Sender [2:1122:2932], Recipient [2:892:2770]: NKikimrTxDataShard.TEvGetInfoRequest 2026-02-19T08:28:50.534980Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [2:1127:2937], Recipient [2:892:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 
2026-02-19T08:28:50.535031Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:50.535075Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:1126:2936], serverId# [2:1127:2937], sessionId# [0:0:0] 2026-02-19T08:28:50.535226Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:1125:2935], Recipient [2:892:2770]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-02-19T08:28:50.535329Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:28:50.535376Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-02-19T08:28:50.535422Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2026-02-19T08:28:50.535498Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-02-19T08:28:50.535604Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:28:50.535646Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:28:50.535680Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:50.535720Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:50.535760Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-02-19T08:28:50.535797Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:28:50.535824Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:50.535847Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:28:50.535868Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:28:50.535959Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-02-19T08:28:50.536164Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[2:1125:2935], 1000} after executionsCount# 1 2026-02-19T08:28:50.536240Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[2:1125:2935], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:28:50.536315Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[2:1125:2935], 1000} finished in read 2026-02-19T08:28:50.536371Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:28:50.536398Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:28:50.536423Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:50.536450Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:50.536486Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:28:50.536523Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:50.536551Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-02-19T08:28:50.536588Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:50.536670Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2026-02-19T08:28:46.280419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:46.280478Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:46.281311Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:46.293543Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:46.293888Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:46.294121Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:46.342197Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:46.348936Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:46.349129Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:46.350714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:46.350786Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:46.350832Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:46.351257Z 
node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:46.351338Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:46.351410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:46.411628Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:46.443942Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:46.444126Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:46.444215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:46.444272Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:46.444309Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:46.444340Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:46.444495Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:46.444565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:46.444866Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:46.444967Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:46.445090Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:46.445126Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:46.445175Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:46.445210Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:46.445271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:46.445308Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:46.445376Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:46.445494Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:46.445540Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:46.445583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:46.452192Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received 
event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:46.452255Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:46.452344Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:46.452513Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:46.452597Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:46.452666Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:46.452713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:46.452745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:46.452780Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:46.452812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:46.453097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:46.453139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:46.453177Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:46.453217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:46.453320Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:46.453352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:46.453383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:46.453417Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:46.453443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:46.466525Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:46.466611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:46.466665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:46.466709Z node 1 :TX_DATASHARD TRACE: 
finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:46.466776Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:46.467519Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:46.467578Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:46.467669Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:46.467864Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:46.467920Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:46.468066Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:46.468125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:46.468162Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:46.468200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:46.472339Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:46.472419Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:46.472637Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:46.472675Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:46.472723Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:46.472832Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:46.472866Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:46.472903Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:46.472935Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:28:51.073114Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:51.073180Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:51.073196Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-02-19T08:28:51.073216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:51.073238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:51.073252Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:51.073317Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:28:51.073339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-02-19T08:28:51.073360Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:51.073384Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:51.073407Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:28:51.073580Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-02-19T08:28:51.073606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.073630Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-02-19T08:28:51.073696Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-02-19T08:28:51.073735Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.073762Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-02-19T08:28:51.073861Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-02-19T08:28:51.073879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, 
processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.073906Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-02-19T08:28:51.073956Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-02-19T08:28:51.073984Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.074010Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-02-19T08:28:51.074082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-02-19T08:28:51.074099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.074115Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-02-19T08:28:51.074168Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-02-19T08:28:51.074196Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.074211Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-02-19T08:28:51.074264Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:459:2400], Recipient [1:238:2229]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-02-19T08:28:51.074279Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.074293Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-02-19T08:28:51.074331Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:51.074363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2026-02-19T08:28:51.074402Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:51.074436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-02-19T08:28:51.074459Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:51.074558Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:51.074578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2026-02-19T08:28:51.074631Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:51.074692Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-02-19T08:28:51.074737Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:51.074860Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:51.074892Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-02-19T08:28:51.074943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:51.074999Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:28:51.075021Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:51.075123Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:51.075142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-02-19T08:28:51.075165Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:104:2137], exec latency: 2 ms, propose latency: 3 ms 2026-02-19T08:28:51.075196Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:28:51.075212Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:51.075320Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-02-19T08:28:51.075341Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.075361Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-02-19T08:28:51.075461Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-02-19T08:28:51.075484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.075500Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-02-19T08:28:51.075567Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-02-19T08:28:51.075584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.075598Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-02-19T08:28:51.075631Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:238:2229], Recipient [1:348:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-02-19T08:28:51.075648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:51.075662Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:40.989469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:41.098065Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:41.114178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:41.114260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:41.114657Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:41.387478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:41.387632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:41.455772Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489718625838 != 1771489718625842 2026-02-19T08:28:41.461969Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:41.502572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:41.588663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:41.887585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:41.901395Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:42.003857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:42.041395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:42.042439Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:42.042742Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:28:42.042990Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:42.052820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:42.083269Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:42.083422Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:42.085145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:42.085231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:42.085286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:42.085759Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:42.085887Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:42.085975Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:28:42.096654Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:42.138251Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:42.138451Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:42.138575Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:28:42.138619Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:42.138664Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:42.138719Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:42.138947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.138998Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.139337Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:42.139440Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:42.139609Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:42.139664Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:42.139705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:42.139743Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:42.139779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:42.139830Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:42.139877Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:42.139968Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.140032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.140080Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:28:42.140169Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:28:42.140230Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:42.140334Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:42.140610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:42.140676Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:42.140788Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:42.140846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:42.140886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:42.140924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:42.140960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:42.141299Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:42.141351Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:28:42.141407Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:42.141451Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:42.141501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:28:42.141539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:42.141624Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:28:42.141674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:28:42.141724Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:42.143544Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:28:42.143642Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:28:42.154261Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... ledNodes# 0 trace_id# 2026-02-19T08:28:51.531388Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:282} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] trace_id# 2026-02-19T08:28:51.531477Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:28:51.531785Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1419:3141] 2026-02-19T08:28:51.531865Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1419:3141], channels: 1 2026-02-19T08:28:51.531962Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:28:51.532034Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1419:3141], trace_id# 2026-02-19T08:28:51.532106Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1419:3141], trace_id# 2026-02-19T08:28:51.532162Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:28:51.532986Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1419:3141] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:28:51.533062Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1419:3141], trace_id# 2026-02-19T08:28:51.533124Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1419:3141], trace_id# 2026-02-19T08:28:51.533568Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:1420:3141], Recipient [2:1356:3107]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-02-19T08:28:51.533694Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:28:51.533765Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v5001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v5001/18446744073709551615 2026-02-19T08:28:51.533810Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v6000/18446744073709551615 2026-02-19T08:28:51.533869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-02-19T08:28:51.533963Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:28:51.534006Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:28:51.534044Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:51.534079Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:51.534124Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-02-19T08:28:51.534168Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:28:51.534198Z node 2 :TX_DATASHARD 
TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:51.534223Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:28:51.534246Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:28:51.534346Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-02-19T08:28:51.534561Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[2:1420:3141], 0} after executionsCount# 1 2026-02-19T08:28:51.534620Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[2:1420:3141], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:28:51.534697Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[2:1420:3141], 0} finished in read 2026-02-19T08:28:51.534760Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:28:51.534784Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:28:51.534805Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:51.534826Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:51.534868Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-02-19T08:28:51.534889Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:51.534913Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-02-19T08:28:51.534952Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:51.535809Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:1420:3141], Recipient [2:1356:3107]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:51.535869Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-02-19T08:28:51.536079Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1419:3141] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 774 Tasks { TaskId: 1 CpuTimeUs: 228 FinishTimeMs: 1771489731535 OutputRows: 1 OutputBytes: 6 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 86 BuildCpuTimeUs: 142 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489731535 CreateTimeMs: 1771489731532 UpdateTimeMs: 1771489731535 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:51.536155Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1419:3141] 2026-02-19T08:28:51.536285Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:28:51.536348Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1415:3141] TxId: 281474976715671. Ctx: { TraceId: 01khtg88tr20trmz1m6xsadtqf, Database: , SessionId: ydb://session/3?node_id=2&id=NzgyMjRjNTItYjk5MjdiOWYtYTE2OTNkMDAtMTZiMWZjZTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000774s ReadRows: 1 ReadBytes: 8 RequestUnits# 1 ForceFlag# true trace_id# { items { uint32_value: 7 } items { uint32_value: 4 } } >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:44.499400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:44.585515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:44.601908Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:44.601990Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:44.602316Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:44.799865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:44.799975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:44.851776Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489722224667 != 1771489722224671 2026-02-19T08:28:44.860790Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:44.901779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:44.997907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:45.276707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:45.290332Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:45.393721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:45.426066Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:45.427332Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:45.427643Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:28:45.427913Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:45.438606Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:45.467660Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:45.467784Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:45.469332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:45.469420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:45.469463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:45.469846Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:45.469980Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:45.470060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:28:45.480810Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:45.510788Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:45.510978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:45.511100Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:28:45.511138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:45.511195Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:45.511236Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:45.511457Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:45.511506Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:45.511791Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:45.511861Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:45.511930Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:45.511972Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:45.512018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:45.512048Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:45.512092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:45.512124Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:45.512172Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:45.512259Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:45.512300Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:45.512339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:28:45.512429Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:28:45.512464Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:45.512578Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:45.512830Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:45.512884Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:45.512964Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:45.513011Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:45.513057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:45.513090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:45.513166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:45.513419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:45.513449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:28:45.513495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:45.513532Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:45.513575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:28:45.513599Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:45.513629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:28:45.513665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:28:45.513697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:45.515339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:28:45.515380Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:28:45.526058Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... iting for: CA [2:1144:2839], trace_id# 2026-02-19T08:28:51.729845Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1144:2839], trace_id# 2026-02-19T08:28:51.729913Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:28:51.730880Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1144:2839] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:28:51.730943Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1144:2839], trace_id# 2026-02-19T08:28:51.730997Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1144:2839], trace_id# 2026-02-19T08:28:51.731810Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1144:2839] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 808 Tasks { TaskId: 1 CpuTimeUs: 334 FinishTimeMs: 1771489731731 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 18 BuildCpuTimeUs: 316 HostName: "ghrun-fkrokuvqke" NodeId: 2 CreateTimeMs: 1771489731730 UpdateTimeMs: 1771489731731 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:51.731887Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. 
Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1144:2839] 2026-02-19T08:28:51.731955Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:223} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:1140:2839] trace_id# 2026-02-19T08:28:51.732016Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000808s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-02-19T08:28:51.732555Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 278003712, Sender [2:1146:2839], Recipient [2:898:2773]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-02-19T08:28:51.732604Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-02-19T08:28:51.732718Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435074, Sender [2:898:2773], Recipient [2:898:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:28:51.732752Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3243: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-02-19T08:28:51.732822Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-02-19T08:28:51.732947Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-02-19T08:28:51.733019Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:260: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 5) 2026-02-19T08:28:51.733067Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 5) table: [72057594046644480:38:1] 2026-02-19T08:28:51.733141Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckWrite 2026-02-19T08:28:51.733185Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:28:51.733219Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckWrite 2026-02-19T08:28:51.733251Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution 
unit BuildAndWaitDependencies 2026-02-19T08:28:51.733281Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:51.733320Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-02-19T08:28:51.733379Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-02-19T08:28:51.733442Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:28:51.733468Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:51.733491Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BlockFailPoint 2026-02-19T08:28:51.733512Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BlockFailPoint 2026-02-19T08:28:51.733537Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:28:51.733558Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BlockFailPoint 2026-02-19T08:28:51.733577Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteWrite 2026-02-19T08:28:51.733597Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteWrite 2026-02-19T08:28:51.733625Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:4] at 72075186224037888 2026-02-19T08:28:51.733669Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-02-19T08:28:51.733824Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-02-19T08:28:51.733887Z node 2 :TX_DATASHARD TRACE: execute_write_unit.cpp:110: add locks to result: 0 2026-02-19T08:28:51.733948Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:51.733989Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteWrite 2026-02-19T08:28:51.734022Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit FinishProposeWrite 2026-02-19T08:28:51.734056Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:28:51.734128Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:51.734154Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit FinishProposeWrite 2026-02-19T08:28:51.734183Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:51.734212Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:51.734249Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:28:51.734274Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:51.734300Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-02-19T08:28:51.745062Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:28:51.745190Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:4] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:28:51.745247Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 4 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-02-19T08:28:51.745327Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:51.745853Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1141:2839] TxId: 281474976715662. Ctx: { TraceId: 01khtg892ragaxxweampbgv8s4, Database: , SessionId: ydb://session/3?node_id=2&id=ZWMzYzU3MmEtNDJiZmExMDItYzZkMTNkZDctMzM2NTAwOWI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:46.671961Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:46.672783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:46.673453Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:28:46.758198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:46.759728Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:46.767118Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:46.767414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:46.767651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:46.769183Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:46.769290Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:47.156428Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:47.262769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:47.262899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:47.263659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:47.263778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:47.308110Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:28:47.308751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:47.309166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:47.385866Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:47.435144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:48.017561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:48.084092Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:48.084205Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:48.427810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:48.485686Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [2:1599:2386], Recipient [2:1625:2399]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:48.489502Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [2:1599:2386], Recipient [2:1625:2399]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:48.489800Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1625:2399] 2026-02-19T08:28:48.489982Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:48.498745Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [2:1599:2386], Recipient [2:1625:2399]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:48.531988Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:48.532280Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:48.533696Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:48.533769Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:48.533811Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:48.534148Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:48.534366Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:48.534422Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [2:1649:2399] in generation 1 2026-02-19T08:28:48.536599Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:48.560920Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:48.561069Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:48.561160Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, 
actorId: [2:1653:2416] 2026-02-19T08:28:48.561200Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:48.561233Z node 2 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:48.561260Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:48.561503Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:1625:2399], Recipient [2:1625:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:48.561541Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:48.561781Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:48.561849Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:48.561989Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:48.562024Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:48.562091Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:48.562131Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:48.562159Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:48.562184Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:48.562223Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:48.562306Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [2:1622:2397], Recipient [2:1625:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:48.562339Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:48.562378Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:1608:3057], serverId# [2:1622:2397], sessionId# [0:0:0] 2026-02-19T08:28:48.563028Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:825:2465], Recipient [2:1622:2397] 2026-02-19T08:28:48.563068Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:48.563147Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:48.563366Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:48.563412Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:48.563481Z node 2 
:TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-02-19T08:28:48.563511Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:48.563540Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:48.563566Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:48.563602Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:48.563854Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710657] at 720751862240 ... iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:51.110555Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:28:51.111153Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:2009:2454], Recipient [2:1625:2399]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:51.111190Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-02-19T08:28:51.194266Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:2025:2455], Recipient [2:1625:2399]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976710662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-02-19T08:28:51.194391Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:28:51.194439Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-02-19T08:28:51.194503Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:28:51.194533Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:28:51.194558Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:51.194580Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:51.194611Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-02-19T08:28:51.194666Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:28:51.194687Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on 
unit BuildAndWaitDependencies 2026-02-19T08:28:51.194702Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:28:51.194721Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:28:51.194800Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976710662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-02-19T08:28:51.194978Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2729: 72075186224037888 Acquired lock# 281474976710662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:28:51.195012Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-02-19T08:28:51.195039Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[2:2025:2455], 0} after executionsCount# 1 2026-02-19T08:28:51.195071Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[2:2025:2455], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:28:51.195117Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[2:2025:2455], 0} finished in read 2026-02-19T08:28:51.195186Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:28:51.195204Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:28:51.195221Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:51.195252Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:51.195282Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-02-19T08:28:51.195297Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:51.195312Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-02-19T08:28:51.195335Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:51.195409Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:28:51.196014Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:2025:2455], Recipient [2:1625:2399]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:51.196088Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } 
{ items { uint32_value: 1 } items { uint32_value: 1 } } 2026-02-19T08:28:51.299525Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:2047:2456], Recipient [2:1625:2399]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976710666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-02-19T08:28:51.299679Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:28:51.299727Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2026-02-19T08:28:51.299790Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-02-19T08:28:51.299818Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:28:51.299842Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:51.299879Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:51.299913Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 72075186224037888 2026-02-19T08:28:51.299948Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-02-19T08:28:51.299968Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:51.299993Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:28:51.300010Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:28:51.300094Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976710666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-02-19T08:28:51.300307Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2729: 72075186224037888 Acquired lock# 281474976710666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:28:51.300347Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2494: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-02-19T08:28:51.300375Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[2:2047:2456], 0} after executionsCount# 1 2026-02-19T08:28:51.300431Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[2:2047:2456], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 
0 2026-02-19T08:28:51.300522Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[2:2047:2456], 0} finished in read 2026-02-19T08:28:51.300577Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-02-19T08:28:51.300607Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:28:51.300648Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:51.300678Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:51.300710Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-02-19T08:28:51.300724Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:51.300740Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 72075186224037888 has finished 2026-02-19T08:28:51.300763Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:51.300823Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:28:51.301476Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:2047:2456], Recipient [2:1625:2399]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:51.301528Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-02-19T08:28:51.302405Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 275709965, Sender [2:263:2141], Recipient [2:1625:2399]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> TKeyValueTest::TestBasicWriteRead >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2026-02-19T08:28:53.428152Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:53.429526Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:503} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> 
TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:44.404765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:44.517314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:44.536911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:44.536988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:44.537392Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:44.812004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:44.812132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:44.881047Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489722263362 != 1771489722263366 2026-02-19T08:28:44.889833Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:44.933478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:45.019050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:45.308772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:45.322116Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:45.427918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:45.463782Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:45.464937Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:45.465228Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:28:45.465489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:45.475995Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:45.516620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:45.516771Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:45.518622Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:45.518706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:45.518763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:45.519239Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:45.519386Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:45.519468Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:28:45.530207Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:45.585803Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:45.585997Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:45.586150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:28:45.586196Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:45.586245Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:45.586291Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:45.586505Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:45.586554Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:45.586868Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:45.586958Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:45.587112Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:45.587157Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:45.587200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:45.587240Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:45.587277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:45.587309Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:45.587367Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:45.587452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:45.587514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:45.587563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:28:45.587653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:28:45.587704Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:45.587821Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:45.588073Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:45.588155Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:45.588255Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:28:45.588310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:45.588365Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:45.588404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:45.588440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:45.588806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:45.588850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:28:45.588900Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:28:45.588939Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:28:45.588984Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:28:45.589022Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:28:45.589055Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:28:45.589142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:28:45.589169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:45.590944Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:28:45.591002Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:28:45.601806Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... ledNodes# 0 trace_id# 2026-02-19T08:28:53.636291Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:282} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] trace_id# 2026-02-19T08:28:53.636362Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-02-19T08:28:53.636662Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1375:3102] 2026-02-19T08:28:53.636734Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1375:3102], channels: 1 2026-02-19T08:28:53.636823Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2748} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-02-19T08:28:53.636881Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1375:3102], trace_id# 2026-02-19T08:28:53.636945Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1375:3102], trace_id# 2026-02-19T08:28:53.636995Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2352} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-02-19T08:28:53.637653Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1375:3102] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-02-19T08:28:53.637733Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1375:3102], trace_id# 2026-02-19T08:28:53.637796Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1375:3102], trace_id# 2026-02-19T08:28:53.638157Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [2:1376:3102], Recipient [2:1292:3055]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-02-19T08:28:53.638266Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:28:53.638321Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715665 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v4000/18446744073709551615 2026-02-19T08:28:53.638360Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-02-19T08:28:53.638416Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-02-19T08:28:53.638489Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:28:53.638530Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:28:53.638568Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:53.638599Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:53.638643Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-02-19T08:28:53.638679Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:28:53.638702Z node 2 :TX_DATASHARD 
TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:53.638722Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:28:53.638743Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:28:53.638834Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-02-19T08:28:53.639020Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[2:1376:3102], 0} after executionsCount# 1 2026-02-19T08:28:53.639073Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[2:1376:3102], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:28:53.639139Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[2:1376:3102], 0} finished in read 2026-02-19T08:28:53.639193Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:28:53.639218Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:28:53.639240Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:53.639263Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:53.639306Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-02-19T08:28:53.639328Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:53.639352Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-02-19T08:28:53.639391Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:28:53.639844Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [2:1376:3102], Recipient [2:1292:3055]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:28:53.639898Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-02-19T08:28:53.640379Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1375:3102] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 625 Tasks { TaskId: 1 CpuTimeUs: 174 FinishTimeMs: 1771489733639 OutputRows: 1 OutputBytes: 6 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 72 BuildCpuTimeUs: 102 HostName: "ghrun-fkrokuvqke" NodeId: 2 StartTimeMs: 1771489733639 CreateTimeMs: 1771489733637 UpdateTimeMs: 1771489733639 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:28:53.640468Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1375:3102] 2026-02-19T08:28:53.640617Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:28:53.640683Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [2:1371:3102] TxId: 281474976715672. Ctx: { TraceId: 01khtg8axj25ah9kxzc5jexqrf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDE5YzkzZjUtZmRiMzVjYzMtZjI1YmU3ZjYtN2QxZDhlYWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000625s ReadRows: 1 ReadBytes: 8 RequestUnits# 1 ForceFlag# true trace_id# { items { uint32_value: 3 } items { uint32_value: 2 } } |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> TKeyValueTest::TestRenameWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:49.818627Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:49.819416Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:49.820084Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:28:49.900479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:49.901599Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:49.905609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:49.905896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:49.906073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:49.907285Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:49.907413Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:50.212581Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:50.315999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:50.316115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:50.316735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:50.316812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:50.360650Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:28:50.361297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:50.361617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:50.436418Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:50.464023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:51.075417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:51.138740Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:51.138848Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:51.478224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:51.522447Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [2:1603:2386], Recipient [2:1629:2399]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:51.525692Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [2:1603:2386], Recipient [2:1629:2399]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:51.525949Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1629:2399] 2026-02-19T08:28:51.526115Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:51.557760Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [2:1603:2386], Recipient [2:1629:2399]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:51.561463Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:51.562110Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:51.563366Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:51.563496Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:51.563543Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:51.563792Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:51.563961Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:51.564019Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [2:1653:2399] in generation 1 2026-02-19T08:28:51.566298Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:51.591796Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:51.591926Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:51.592003Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, 
actorId: [2:1657:2416] 2026-02-19T08:28:51.592032Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:51.592054Z node 2 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:51.592079Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:51.592354Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:1629:2399], Recipient [2:1629:2399]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:51.592403Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:51.592649Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:28:51.592731Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:28:51.592838Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:51.592885Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:51.592919Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:28:51.592947Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:51.592978Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:51.593004Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:28:51.593043Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:51.593211Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [2:1626:2397], Recipient [2:1629:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:51.593249Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:51.593305Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:1613:3061], serverId# [2:1626:2397], sessionId# [0:0:0] 2026-02-19T08:28:51.593620Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:825:2465], Recipient [2:1626:2397] 2026-02-19T08:28:51.593697Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:51.593842Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:51.594058Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:28:51.594104Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:28:51.594201Z node 2 
:TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-02-19T08:28:51.594251Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:28:51.594283Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:28:51.594329Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:28:51.594361Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:28:51.594594Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710657] at 720751862240 ... -02-19T08:28:54.653065Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:66:2065] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3503 UnfrozenTablets: 72075186224037888 2026-02-19T08:28:54.653256Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:54.653285Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976710670] at 72075186224037888 on unit ExecuteWrite 2026-02-19T08:28:54.653313Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4026: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976710670 2026-02-19T08:28:54.653348Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4026: Send RS 3 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976710670 2026-02-19T08:28:54.653373Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976710670] at 72075186224037888 on unit CompleteWrite 2026-02-19T08:28:54.653409Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:54.653459Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-02-19T08:28:54.653482Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-02-19T08:28:54.653499Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-02-19T08:28:54.654007Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [2:2367:2511], Recipient [2:2495:2541]: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2026-02-19T08:28:54.654058Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:28:54.654085Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976710670 2026-02-19T08:28:54.654134Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 
ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2026-02-19T08:28:54.654461Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [2:2367:2511], Recipient [1:2479:3602] 2026-02-19T08:28:54.654493Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:28:54.654524Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976710670 2026-02-19T08:28:54.654565Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-02-19T08:28:54.654606Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-02-19T08:28:54.655090Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3503} 2026-02-19T08:28:54.655188Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-02-19T08:28:54.655214Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976710670] at 72075186224037890 on unit ExecuteWrite 2026-02-19T08:28:54.655259Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4026: Send RS 1 at 72075186224037890 from 72075186224037890 to 72075186224037888 txId 281474976710670 2026-02-19T08:28:54.655296Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4026: Send RS 2 at 72075186224037890 from 72075186224037890 to 72075186224037889 txId 281474976710670 2026-02-19T08:28:54.655319Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976710670] at 72075186224037890 on unit CompleteWrite 2026-02-19T08:28:54.655356Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3503 : 281474976710670] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2589:3628] 2026-02-19T08:28:54.655393Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:28:54.655445Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-02-19T08:28:54.655877Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [2:2495:2541], Recipient [2:2367:2511]: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-02-19T08:28:54.655908Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:28:54.655932Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976710670 2026-02-19T08:28:54.655972Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-02-19T08:28:54.656055Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile 
write [3503 : 281474976710670] from 72075186224037888 at tablet 72075186224037888 send result to client [1:2587:3628] 2026-02-19T08:28:54.656365Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:54.657414Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:66:2065] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3503 UnfrozenTablets: 72075186224037890 2026-02-19T08:28:54.657498Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [2:2495:2541], Recipient [1:2479:3602] 2026-02-19T08:28:54.657526Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:28:54.657557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976710670 2026-02-19T08:28:54.657599Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-02-19T08:28:54.657679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3503 : 281474976710670] from 72075186224037889 at tablet 72075186224037889 send result to client [1:2588:3628] 2026-02-19T08:28:54.658105Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:66:2065] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3503} 2026-02-19T08:28:54.658445Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:28:54.664022Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-02-19T08:28:54.664147Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [2:2495:2541], Recipient [2:2367:2511]: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2026-02-19T08:28:54.664182Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:54.664216Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976710670 2026-02-19T08:28:54.664301Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-02-19T08:28:54.664528Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-02-19T08:28:54.664725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [2:2495:2541], Recipient [1:2427:3561] 2026-02-19T08:28:54.664753Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:54.664785Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976710670 2026-02-19T08:28:54.665077Z node 2 :TX_DATASHARD TRACE: 
datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:2427:3561], Recipient [2:2367:2511] 2026-02-19T08:28:54.665106Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:54.665135Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976710670 2026-02-19T08:28:54.665176Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [1:2427:3561], Recipient [2:2495:2541] 2026-02-19T08:28:54.665196Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:54.665221Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976710670 2026-02-19T08:28:54.665559Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-02-19T08:28:54.665628Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [2:2367:2511], Recipient [2:2495:2541]: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-02-19T08:28:54.665654Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:54.665689Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976710670 2026-02-19T08:28:54.665934Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [2:2367:2511], Recipient [1:2427:3561] 2026-02-19T08:28:54.665965Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:28:54.665993Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976710670 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:28:47.832149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:47.921327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:47.934084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:47.934144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:47.934488Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:48.143491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:48.143606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:48.196957Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489725485815 != 1771489725485819 2026-02-19T08:28:48.203446Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:48.245205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:48.326385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:48.602888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:48.616433Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:48.717262Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-02-19T08:28:48.717328Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976715657 ProcessProposeTransaction 2026-02-19T08:28:48.717436Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:867:2749] 2026-02-19T08:28:48.832071Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:867:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:28:48.832166Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:867:2749] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:28:48.832810Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:28:48.832919Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:867:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:28:48.833322Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:28:48.833450Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:867:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:28:48.833577Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:867:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-02-19T08:28:48.833877Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:867:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-02-19T08:28:48.835723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:48.836794Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:867:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-02-19T08:28:48.836858Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:867:2749] txid# 281474976715657 SEND to# [1:835:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-02-19T08:28:48.868469Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:48.869342Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:48.869534Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-02-19T08:28:48.869702Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:48.878700Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:48.879536Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:48.905366Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:48.905635Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-02-19T08:28:48.905820Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:48.913176Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:48.913648Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:48.913814Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:48.915002Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:28:48.915063Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:28:48.915105Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: 
LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:28:48.915349Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:48.915478Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:48.915553Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2773] in generation 1 2026-02-19T08:28:48.915759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:48.915807Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:48.916653Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:28:48.916701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:28:48.916734Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:28:48.916911Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:48.916973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:48.917018Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:930:2775] in generation 1 2026-02-19T08:28:48.927749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:48.980124Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:28:48.980296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:48.980405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:933:2794] 2026-02-19T08:28:48.980441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:28:48.980475Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:28:48.980549Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:48.980806Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:897:2773], Recipient [1:897:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:48.980850Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:48.980916Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:48.980949Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:28:48.981003Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:48.981050Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:934:2795] 2026-02-19T08:28:48.981080Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change 
sender: at tablet: 72075186224037889 2026-02-19T08:28:48.981114Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:28:48.981136Z node 1 :TX ... eRestarts 2026-02-19T08:28:55.058902Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-02-19T08:28:55.058928Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-02-19T08:28:55.058956Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:28:55.059026Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:55.059047Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-02-19T08:28:55.059074Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:28:55.059099Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:28:55.059130Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-02-19T08:28:55.059146Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:28:55.059167Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-02-19T08:28:55.069911Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-02-19T08:28:55.069981Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-02-19T08:28:55.070022Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-02-19T08:28:55.070099Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:28:55.072104Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:69:2116] Handle TEvProposeTransaction 2026-02-19T08:28:55.072169Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:69:2116] TxId# 281474976715661 ProcessProposeTransaction 2026-02-19T08:28:55.072251Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [2:69:2116] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:1064:2882] DataReq marker# P0 2026-02-19T08:28:55.072411Z node 2 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [2:1064:2882] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2026-02-19T08:28:55.072644Z node 2 :TX_PROXY DEBUG: datareq.cpp:1467: Actor# [2:1064:2882] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2026-02-19T08:28:55.072898Z node 2 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [2:1064:2882] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2026-02-19T08:28:55.072998Z node 2 :TX_PROXY DEBUG: datareq.cpp:1204: Actor# [2:1064:2882] txid# 
281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2026-02-19T08:28:55.073333Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [2:1064:2882], Recipient [2:892:2770]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1064 RawX2: 8589937474 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020&\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t(\004\000\000\000\000\000\000\021B\013\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2026-02-19T08:28:55.073399Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:55.073517Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:28:55.073723Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:38:0] 2026-02-19T08:28:55.073831Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2026-02-19T08:28:55.073897Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-02-19T08:28:55.073948Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2026-02-19T08:28:55.073993Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:55.074035Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:28:55.074084Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-02-19T08:28:55.074156Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715661] at 72075186224037888 2026-02-19T08:28:55.074211Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-02-19T08:28:55.074239Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:55.074265Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2026-02-19T08:28:55.074292Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2026-02-19T08:28:55.074323Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-02-19T08:28:55.074340Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2026-02-19T08:28:55.074355Z node 2 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2026-02-19T08:28:55.074371Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2026-02-19T08:28:55.074404Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:99: Requested stream clearance from [2:1064:2882] for [0:281474976715661] at 72075186224037888 2026-02-19T08:28:55.074431Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2026-02-19T08:28:55.074477Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:28:55.074552Z node 2 :TX_PROXY DEBUG: datareq.cpp:2504: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2026-02-19T08:28:55.074618Z node 2 :TX_PROXY DEBUG: datareq.cpp:2513: Collected all clerance requests, txid: 281474976715661 2026-02-19T08:28:55.074663Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2026-02-19T08:28:55.074785Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287942, Sender [2:1064:2882], Recipient [2:892:2770]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2026-02-19T08:28:55.074819Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3238: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2026-02-19T08:28:55.074963Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287940, Sender [2:1064:2882], Recipient [2:892:2770]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2026-02-19T08:28:55.075001Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3237: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-02-19T08:28:55.075078Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [2:892:2770], Recipient [2:892:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:55.075117Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:55.075187Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:28:55.075233Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:28:55.075284Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2026-02-19T08:28:55.075331Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2026-02-19T08:28:55.075372Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715661] at 72075186224037888 2026-02-19T08:28:55.075407Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-02-19T08:28:55.075441Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 
2026-02-19T08:28:55.075496Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2026-02-19T08:28:55.075523Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2026-02-19T08:28:55.075687Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2026-02-19T08:28:55.075717Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:28:55.075760Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-02-19T08:28:55.075798Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:28:55.075835Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:28:55.075908Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:28:55.076342Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435082, Sender [2:1069:2886], Recipient [2:892:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-02-19T08:28:55.076388Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3246: StateWork, processing event TEvPrivate::TEvRegisterScanActor >> TKeyValueTest::TestObtainLockNewApi >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2026-02-19T08:28:56.712259Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:56.712321Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:272} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1771489736711 ErrorReason# 2026-02-19T08:28:56.718448Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:56.718515Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:242} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1771489736718 ErrorReason# 2026-02-19T08:28:56.722542Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-02-19T08:28:56.722591Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:224} Received EvGetResult with an unexpected cookie. 
KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1771489736722 ErrorReason# >> DataShardTxOrder::ZigZag_oo [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query [GOOD] >> KqpCompileFallback::FallbackToVersion1Success >> KqpCompileFallback::FallbackWithPreparedQuery >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2026-02-19T08:28:40.572348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:40.572397Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:40.573126Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:40.588115Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:40.588476Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:40.588757Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:40.624005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:40.633665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:40.633827Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:40.635408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:40.635474Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:40.635513Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:40.635806Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:40.635872Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:40.635949Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:40.705171Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:40.737042Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:40.737232Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:40.737338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:40.737397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 
2026-02-19T08:28:40.737456Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:40.737492Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:40.737657Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.737722Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.738018Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:40.738110Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:40.738216Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:40.738252Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:40.738300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:40.738353Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:40.738406Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:40.738441Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:40.738485Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:40.738598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.738649Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.738690Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:40.747738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:40.747808Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:40.747905Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:40.748060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:40.748127Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:40.748201Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:40.748252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:40.748298Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:40.748333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:40.748365Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:40.748660Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:40.748696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:40.748737Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:40.748772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:40.748832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:40.748859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:40.748894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:40.748951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:40.748981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:40.761065Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:40.761131Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:40.761184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:40.761239Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:40.761353Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:40.761893Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.761947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:40.761994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:40.762131Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 
2026-02-19T08:28:40.762169Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:40.762315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:40.762354Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:40.762408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:40.762448Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:40.766042Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:40.766117Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:40.766362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.766396Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:40.766447Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:40.766497Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:40.766541Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:40.766576Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:40.766610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2026-02-19T08:28:57.855496Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:57.855640Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [6:344:2311], Recipient [6:344:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:57.855666Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:57.855698Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-02-19T08:28:57.855721Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:57.855741Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-02-19T08:28:57.855764Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2026-02-19T08:28:57.855783Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2026-02-19T08:28:57.855805Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.855823Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2026-02-19T08:28:57.855842Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2026-02-19T08:28:57.855861Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2026-02-19T08:28:57.856300Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2026-02-19T08:28:57.856333Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.856353Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2026-02-19T08:28:57.856370Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2026-02-19T08:28:57.856388Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2026-02-19T08:28:57.856416Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.856432Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2026-02-19T08:28:57.856463Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2026-02-19T08:28:57.856482Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2026-02-19T08:28:57.856514Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2026-02-19T08:28:57.856540Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437185 2026-02-19T08:28:57.856560Z 
node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2026-02-19T08:28:57.856591Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.856608Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2026-02-19T08:28:57.856624Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2026-02-19T08:28:57.856642Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2026-02-19T08:28:57.856671Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.856685Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2026-02-19T08:28:57.856701Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2026-02-19T08:28:57.856717Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2026-02-19T08:28:57.856735Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.856751Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2026-02-19T08:28:57.856766Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2026-02-19T08:28:57.856781Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2026-02-19T08:28:57.856799Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.856815Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2026-02-19T08:28:57.856829Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2026-02-19T08:28:57.856845Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2026-02-19T08:28:57.856861Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.856875Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2026-02-19T08:28:57.856889Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BlockFailPoint 2026-02-19T08:28:57.856904Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2026-02-19T08:28:57.856922Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.856938Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BlockFailPoint 2026-02-19T08:28:57.856953Z node 6 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2026-02-19T08:28:57.856968Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2026-02-19T08:28:57.857205Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2026-02-19T08:28:57.857240Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:28:57.857276Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.857294Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2026-02-19T08:28:57.857310Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2026-02-19T08:28:57.857327Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2026-02-19T08:28:57.857443Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is DelayComplete 2026-02-19T08:28:57.857462Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2026-02-19T08:28:57.857481Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2026-02-19T08:28:57.857499Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2026-02-19T08:28:57.857519Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-02-19T08:28:57.857536Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2026-02-19T08:28:57.857554Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437185 has finished 2026-02-19T08:28:57.857575Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:57.857594Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-02-19T08:28:57.857612Z node 6 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-02-19T08:28:57.857635Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-02-19T08:28:57.870660Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-02-19T08:28:57.870730Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-02-19T08:28:57.870794Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-02-19T08:28:57.870836Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-02-19T08:28:57.870900Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:57.870950Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-02-19T08:28:57.871279Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-02-19T08:28:57.871307Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-02-19T08:28:57.871336Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:57.871363Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-02-19T08:28:57.871404Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:28:57.871437Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:451:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:454:2057] recipient: [2:453:2379] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:456:2057] recipient: [2:453:2379] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:455:2380] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:571:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:451:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:454:2057] recipient: [3:453:2379] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:456:2057] recipient: [3:453:2379] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:455:2380] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:571:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:452:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:455:2057] recipient: [4:454:2379] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:457:2057] recipient: [4:454:2379] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:456:2380] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:572:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] >> KqpSystemView::PartitionStatsFollower [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: 2026-02-19T08:28:32.362486Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490381677732745:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:32.362995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:32.837781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:32.837900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:32.861210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:32.873082Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-02-19T08:28:32.963246Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:32.973050Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490381677732704:2081] 1771489712339893 != 1771489712339896 2026-02-19T08:28:33.094306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:33.109450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:33.109471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:33.109489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:33.109573Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:33.334894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:33.697493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:33.815053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:33.815101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:33.815156Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [1:7608490381677733072:2150], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:33.815168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:34.815970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:34.816006Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:34.816056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [1:7608490381677733072:2150], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:34.816073Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:35.000664Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7608490381677732738:2073], interval end# 
2026-02-19T08:28:35.000000Z, event interval end# 2026-02-19T08:28:35.000000Z 2026-02-19T08:28:35.000700Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7608490381677732738:2073], query logs count# 0, processor ids count# 0, processor id to database count# 0 2026-02-19T08:28:35.000892Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7608490381677732693:2070], interval end# 2026-02-19T08:28:35.000000Z, event interval end# 2026-02-19T08:28:35.000000Z 2026-02-19T08:28:35.000923Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7608490381677732693:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2026-02-19T08:28:35.817947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:35.821869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:35.821968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [1:7608490381677733072:2150], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:35.821992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:35.859799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490394562635490:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.859910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.860456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490394562635500:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.860504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.106784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877761, Sender [1:7608490398857602814:2508], Recipient [1:7608490381677733072:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:36.106825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5396: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:36.106841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6251: Pipe server connected, at tablet: 72057594046644480 2026-02-19T08:28:36.106889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122432, Sender [1:7608490398857602810:2505], Recipient [1:7608490381677733072:2150]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-02-19T08:28:36.106902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5309: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-02-19T08:28:36.204095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-02-19T08:28:36.204504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2026-02-19T08:28:36.204626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2026-02-19T08:28:36.205228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-02-19T08:28:36.205262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2026-02-19T08:28:36.205277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 281474976710658:0 type: TxCreateTable target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2026-02-19T08:28:36.205318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 
72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:28:36.205467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-02-19T08:28:36.205467Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:83: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] path /Root/Followers ShardIndices size 1 2026-02-19T08:28:36.205521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710658:0 Waiting -> CreateParts 2026-02-19T08:28:36.206336Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_create_table.cpp:766: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 38] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ... 35079, Sender [0:0:0], Recipient [1:7608490398857602885:2337]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-02-19T08:28:56.315620Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3376: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-02-19T08:28:56.390446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:28:56.390487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:28:56.390505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2026-02-19T08:28:56.390549Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:798: Will execute TTxStoreStats, queue# 2 2026-02-19T08:28:56.390563Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:807: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-02-19T08:28:56.390616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046644480:1 data size 800 row count 4 2026-02-19T08:28:56.390646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], pathId map=Followers, is column=0, is olap=0, RowCount 4, DataSize 800 2026-02-19T08:28:56.390656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2026-02-19T08:28:56.390710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:236: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2026-02-19T08:28:56.390753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-02-19T08:28:56.390783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046644480:1 data size 0 row count 0 2026-02-19T08:28:56.390798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=3, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], pathId map=Followers, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:28:56.390806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037888, followerId 3 2026-02-19T08:28:56.390826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:398: Do not want to split tablet 72075186224037888 by the CPU load from the follower ID 3, reason: SplitByLoadNotEnabledForTable 2026-02-19T08:28:56.390871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-02-19T08:28:56.390980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-02-19T08:28:56.390997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5501: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-02-19T08:28:56.391009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-02-19T08:28:56.391078Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0.00085 TabletId: 72075186224037888 NodeId: 1 StartTime: 1771489716216 AccessTime: 1771489716785 UpdateTime: 1771489716643 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:28:56.391186Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 3 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0.000122 TabletId: 72075186224037888 NodeId: 1 StartTime: 1771489716296 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 3 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-02-19T08:28:56.761384Z node 1 :SYSTEM_VIEWS INFO: sysview_service.cpp:888: Navigate by database succeeded: service id# [1:7608490381677732738:2073], database# /Root, no sysview processor 2026-02-19T08:28:56.830279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:56.830324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:56.830370Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, 
Sender [1:7608490381677733072:2150], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:56.830383Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:57.830736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:57.830770Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:57.830814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271124999, Sender [1:7608490381677733072:2150], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:57.830828Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... SELECT from partition_stats for /Root/Followers , attempt 2 2026-02-19T08:28:58.829568Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7608490493346883879:2496], owner: [1:7608490493346883877:2494], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-02-19T08:28:58.830182Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7608490493346883879:2496], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2026-02-19T08:28:58.830328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274595843, Sender [1:7608490493346883879:2496], Recipient [1:7608490381677733072:2150]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2026-02-19T08:28:58.830347Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5364: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2026-02-19T08:28:58.830491Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7608490493346883879:2496], row count: 2, finished: 1 2026-02-19T08:28:58.830549Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7608490493346883879:2496], owner: [1:7608490493346883877:2494], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-02-19T08:28:58.831318Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7608490381677732738:2073], database# /Root, query hash# 3266603936201095014, cpu time# 236421 2026-02-19T08:28:58.831533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:58.831557Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-02-19T08:28:58.831592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received 
event# 271124999, Sender [1:7608490381677733072:2150], Recipient [1:7608490381677733072:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-02-19T08:28:58.831605Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5305: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/Followers' ... SELECT from partition_stats, attempt 0 2026-02-19T08:28:59.184939Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7608490497641851193:2505], owner: [1:7608490497641851191:2503], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-02-19T08:28:59.185449Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7608490497641851193:2505], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2026-02-19T08:28:59.185678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274595843, Sender [1:7608490497641851193:2505], Recipient [1:7608490381677733072:2150]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2026-02-19T08:28:59.185729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5364: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2026-02-19T08:28:59.185886Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7608490497641851193:2505], row count: 2, finished: 1 2026-02-19T08:28:59.185967Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7608490497641851193:2505], owner: [1:7608490497641851191:2503], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-02-19T08:28:59.188129Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7608490381677732738:2073], database# /Root, query hash# 18339066598126957035, cpu time# 338818 >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] 
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:78:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:81:2057] recipient: [5:80:2112] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:83:2057] recipient: [5:80:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:82:2113] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:198:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:79:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:84:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:83:2113] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:199:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:28:41.674838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:41.768072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:41.783427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:41.783506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:41.783922Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:42.001152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:42.001248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:42.055425Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489719478420 != 1771489719478424 2026-02-19T08:28:42.064214Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:42.107145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:42.190258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:42.478398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:42.491116Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:42.593872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:42.870014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2026-02-19T08:28:43.073471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:43.073559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1041:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:43.073653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:43.074356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1046:2880], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:43.074511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:43.077575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:43.219605Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1044:2878], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:28:43.291344Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1102:2917] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } ... waiting for SysViewsRoster update finished 2026-02-19T08:28:47.728333Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:28:47.732567Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:28:47.737207Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:28:47.737560Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:28:47.737616Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:47.928856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:47.928936Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:47.950743Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489724995173 != 1771489724995177 2026-02-19T08:28:47.953809Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:47.996416Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:48.074621Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:28:48.360585Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:48.372453Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:48.472724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:48.687112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:48.894442Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.894530Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1041:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.894596Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.895399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1046:2880], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.895605Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:48.899218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:49.085646Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1044:2878], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:28:49.121616Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:1103:2918] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for at least 2 blocked commits 2026-02-19T08:28:51.443090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:28:51.443147Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 ... blocked commit for tablet 72075186224037889 ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037889 ... blocked commit for tablet 72075186224037888 >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestVacuumWithMockDisk >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] >> KqpCompileFallback::FallbackToVersion1Success [GOOD] >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] Test command err: 2026-02-19T08:28:58.447648Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490490303490849:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:58.448243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:58.639442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:58.675100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:58.675218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:58.720590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:58.721726Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:58.723460Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490490303490818:2081] 1771489738445845 != 1771489738445848 2026-02-19T08:28:58.835731Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:58.835757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:58.835764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:58.835853Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:58.858072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:59.198553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:59.258949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:59.379788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:59.478545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:59.518501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:59.566576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.166840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490503188394585:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.166986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.167248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490503188394595:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.167311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.513457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.539946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.562159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.587456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.613304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.642261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.671137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.708163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.777187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490503188395467:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.777279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.777346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490503188395472:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.777483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490503188395474:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.777526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.782074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:01.789379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490503188395476:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:01.866358Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490503188395527:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:02.997772Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:02.997882Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Perform request, TraceId.SpanIdPtr: 0x00007CCC990C1458 2026-02-19T08:29:02.997933Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:425: Received compile request, sender: [1:7608490507483363113:2534], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", keepInCache: 1, split: 0{ TraceId: 01khtg8m4m1xdevgh5gxs6fvna, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNlMjE0YzItNWVlZTdkZS00MjYyZjIzMi01YjU2Nzc5Yw==, PoolId: default, IsStreamingQuery: 0} 2026-02-19T08:29:02.998070Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:02.998132Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:523: Added request to queue, sender: [1:7608490507483363113:2534], queueSize: 1 2026-02-19T08:29:02.998723Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n 2026-02-19T08:29:02.998795Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:888: Created compile actor, sender: [1:7608490507483363113:2534], compileActor: [1:7608490507483363121:2539] 2026-02-19T08:29:02.998815Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:271: traceId: verbosity = 0, trace_id = 0 2026-02-19T08:29:02.998844Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:278: Start compilation, self: [1:7608490507483363121:2539], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", startTime: 2026-02-19T08:29:02.998793Z 2026-02-19T08:29:03.012652Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:664: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7608490507483363121:2539], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n " 2026-02-19T08:29:03.189079Z node 1 :KQP_COMPILE_ACTOR DEBUG: 
kqp_compile_actor.cpp:427: [[1:7608490507483363121:2539]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1771489743","query_text":"\\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"294fea2-49ae5b5e-dfd0c38c-509e3a3a","version":"1.0"} 2026-02-19T08:29:03.189563Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:618: Compilation successful, self: [1:7608490507483363121:2539], duration: 0.190744s 2026-02-19T08:29:03.189602Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:439: Send response, self: [1:7608490507483363121:2539], owner: [1:7608490503188394546:2388], status: SUCCESS, issues: , uid: 294fea2-49ae5b5e-dfd0c38c-509e3a3a 2026-02-19T08:29:03.189696Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:638: Received response, sender: [1:7608490507483363113:2534], status: SUCCESS, compileActor: [1:7608490507483363121:2539] 2026-02-19T08:29:03.189910Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:729: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:03.190024Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:911: Send response, sender: [1:7608490507483363113:2534], queryUid: 294fea2-49ae5b5e-dfd0c38c-509e3a3a, status:SUCCESS 2026-02-19T08:29:03.197684Z 
node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1233: Served query from cache by uid, sender: [1:7608490507483363113:2534], queryUid: 294fea2-49ae5b5e-dfd0c38c-509e3a3a |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackToVersion1Success [GOOD] Test command err: 2026-02-19T08:28:58.416196Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490491436268756:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:58.416976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:58.663370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:58.663515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:58.696407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:58.705240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:58.724776Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:58.727135Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490491436268715:2081] 1771489738413262 != 1771489738413265 2026-02-19T08:28:58.835967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:58.835988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:58.835998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:28:58.836056Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:58.954302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:59.206946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:59.254211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:59.364292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:59.473921Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:59.518539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:59.575815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.055944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490504321172488:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.056086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.056386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490504321172498:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.056453Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.513814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.541441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.569252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.596531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.619726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.671009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.697081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.733561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:01.799001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490504321173368:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.799085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.799168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490504321173373:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.799274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490504321173375:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.799328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:01.801973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:01.808552Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490504321173377:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:01.905948Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490504321173428:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:02.987833Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:02.987930Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Perform request, TraceId.SpanIdPtr: 0x00007CE9092E6C58 2026-02-19T08:29:02.987976Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:425: Received compile request, sender: [1:7608490508616141013:2534], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01khtg8m4actfm8nw7dg5bssbq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YjZmN2E0ZmItYjg4ZDM5ZjYtM2ExNjgzNjAtMWIwZWQ0MGY=, PoolId: default, IsStreamingQuery: 0} 2026-02-19T08:29:02.988115Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:02.988169Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:523: Added request to queue, sender: [1:7608490508616141013:2534], queueSize: 1 2026-02-19T08:29:02.988763Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n 2026-02-19T08:29:02.988823Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:888: Created compile actor, sender: [1:7608490508616141013:2534], compileActor: [1:7608490508616141021:2539] 2026-02-19T08:29:02.988847Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:271: traceId: verbosity = 0, trace_id = 0 2026-02-19T08:29:02.988880Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:278: Start compilation, self: [1:7608490508616141021:2539], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-02-19T08:29:02.988828Z 2026-02-19T08:29:03.135938Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:427: [[1:7608490508616141021:2539]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1771489743","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"eb1905f0-3b83c505-558f3dd1-aeb2f6dd","version":"1.0"} 2026-02-19T08:29:03.136439Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:618: Compilation successful, self: [1:7608490508616141021:2539], duration: 0.147573s 2026-02-19T08:29:03.136477Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:439: Send response, self: [1:7608490508616141021:2539], owner: [1:7608490504321172446:2386], status: SUCCESS, issues: , uid: eb1905f0-3b83c505-558f3dd1-aeb2f6dd 2026-02-19T08:29:03.136583Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:638: Received response, sender: [1:7608490508616141013:2534], status: SUCCESS, compileActor: [1:7608490508616141021:2539] 2026-02-19T08:29:03.136624Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:911: Send response, sender: [1:7608490508616141013:2534], queryUid: eb1905f0-3b83c505-558f3dd1-aeb2f6dd, status:SUCCESS >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> SplitterBasic::LimitExceed [GOOD] >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] 2026-02-19T08:28:57.355965Z node 1 :KEYVALUE ERROR: keyvalue_state.cpp:3045: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2026-02-19T08:29:04.383733Z node 2 :KEYVALUE ERROR: keyvalue_state.cpp:3045: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=noset |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=empty |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> 
PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=foo |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=foo__consumerMPI=foo |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=foo__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=noset >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=empty >> KqpCompileFallback::NoFallbackWhenSqlVersion1 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=foo |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=noset |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithScanQuery >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=foo >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=foo [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> 
TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:105:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:85:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:87:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:87:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:205:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:86:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:91:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:90:2118] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:206:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:87:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:89:2118] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:92:2057] recipient: [12:89:2118] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:91:2119] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:88:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:90:2119] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:93:2057] recipient: [13:90:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:92:2120] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:112:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2122] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:96:2057] recipient: [15:93:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:95:2123] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=foo [GOOD] Test command err: Run PartitionLevelMetrics(EnableMetricsLevel=1, FirstClassCitizen=0, PartitionMetricsLevel=2, PartitionMonitoringProjectId=, ConsumersMonitoringProjectId=(NULL)) 2026-02-19T08:29:07.013578Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:29:07.191130Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:29:07.191272Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:29:07.191343Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:29:07.191421Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:29:07.219646Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:29:07.249322Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 1 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 
TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 2 } MetricsLevel: 2 MonitoringProjectId: "" 2026-02-19T08:29:07.251498Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:212:2167] 2026-02-19T08:29:07.254564Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:212:2167] 2026-02-19T08:29:07.261550Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:213:2167] 2026-02-19T08:29:07.263343Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:213:2167] 2026-02-19T08:29:07.279329Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:29:07.279816Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|48407411-c7016f62-d8cf1c5c-fc088284_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:29:07.290904Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:29:07.291212Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|51ee1367-28881529-8ce36c7f-65558f27_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:29:07.295653Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:29:07.295926Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|16b73f3a-ec54c55e-1c26a961-4b1a1bb4_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:29:07.300397Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:29:07.300660Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c1e4c0d-9eab211-f7d18460-58f946ac_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:29:07.305795Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:29:07.306072Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|599b5fd3-7e83da9f-3a093964-9fb0d83b_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 XXXXX before enable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null 2026-02-19T08:29:07.313882Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:29:07.317398Z node 1 :PERSQUEUE 
INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 2 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 3 } MetricsLevel: 2 MonitoringProjectId: "" XXXXX after enable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Set pipe for create session: [1:298:2291] 2026-02-19T08:29:07.320767Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [1:297:2290] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 298 RawX2: 4294969587 } Cookie: 123 } via pipe: [1:204:2217] Set pipe for create session: [1:310:2300] 2026-02-19T08:29:07.330654Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [1:309:2299] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 310 RawX2: 4294969596 } Cookie: 123 } via pipe: [1:204:2217] XXXXX after read: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null 2026-02-19T08:29:07.336834Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:29:07.341104Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 3 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 2 } MetricsLevel: 2 MonitoringProjectId: "" XXXXX after disable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Run PartitionLevelMetrics(EnableMetricsLevel=1, FirstClassCitizen=0, PartitionMetricsLevel=2, PartitionMonitoringProjectId=, ConsumersMonitoringProjectId=) 2026-02-19T08:29:07.728451Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 
disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:29:07.766873Z node 2 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:29:07.766933Z node 2 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:29:07.766972Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:29:07.767015Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:29:07.780131Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:29:07.780693Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 4 actor [2:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 } Consumers { Name: "user1" Generation: 4 Important: false MetricsLevel: 2 MonitoringProjectId: "" } MetricsLevel: 2 MonitoringProjectId: "" 2026-02-19T08:29:07.781121Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:215:2167] 2026-02-19T08:29:07.782550Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:215:2167] 2026-02-19T08:29:07.784142Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:216:2167] 2026-02-19T08:29:07.785089Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:216:2167] 2026-02-19T08:29:07.789573Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:29:07.789805Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|534271f2-ac7270c4-368a37d0-8758bbea_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:29:07.794099Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:29:07.794304Z node 2 :PERSQUEUE INFO: ... 
ion: 0 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 monitoring_project_id=foo: Account=asdfgs: TopicPath=asdfgs/topic: OriginDC=Dc1: Partition=0: ConsumerPath=shared/user1: sensor=BytesReadPerPartition: 0 sensor=MessageLagByCommittedPerPartition: 30 sensor=MessageLagByLastReadPerPartition: 29 sensor=MessagesReadPerPartition: 0 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 Partition=1: ConsumerPath=shared/user1: sensor=BytesReadPerPartition: 0 sensor=MessageLagByCommittedPerPartition: 20 sensor=MessageLagByLastReadPerPartition: 19 sensor=MessagesReadPerPartition: 0 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 Sensor: MessageLagByCommittedPerPartition {"":[30,20],"foo":[30,20]} Sensor: BytesReadPerPartition {"":[0,0],"foo":[0,0]} Sensor: MessagesReadPerPartition {"":[0,0],"foo":[0,0]} Set pipe for create session: [9:299:2291] 2026-02-19T08:29:10.994028Z node 9 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [9:298:2290] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 299 RawX2: 38654707955 } Cookie: 123 } via pipe: [9:207:2219] Set pipe for create session: [9:311:2300] 2026-02-19T08:29:10.998268Z node 9 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [9:310:2299] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 311 RawX2: 38654707964 } Cookie: 123 } via pipe: [9:207:2219] XXXXX after read: host=cluster: Account=asdfgs: TopicPath=asdfgs/topic: OriginDC=Dc1: Partition=0: sensor=BytesWrittenPerPartition: 540 sensor=MessagesWrittenPerPartition: 30 sensor=SourceIdCount: 3 sensor=TimeSinceLastWriteMs: 0 sensor=WriteTimeLagMsByLastWrite: 000 ConsumerPath=shared/user: sensor=BytesReadPerPartition: 0 sensor=MessageLagByCommittedPerPartition: 30 sensor=MessageLagByLastReadPerPartition: 29 sensor=MessagesReadPerPartition: 0 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 ConsumerPath=shared/user1: sensor=BytesReadPerPartition: 106 sensor=MessageLagByCommittedPerPartition: 30 sensor=MessageLagByLastReadPerPartition: 29 sensor=MessagesReadPerPartition: 2 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 Partition=1: sensor=BytesWrittenPerPartition: 360 sensor=MessagesWrittenPerPartition: 20 sensor=SourceIdCount: 2 sensor=TimeSinceLastWriteMs: 0 sensor=WriteTimeLagMsByLastWrite: 000 ConsumerPath=shared/user: sensor=BytesReadPerPartition: 0 sensor=MessageLagByCommittedPerPartition: 20 sensor=MessageLagByLastReadPerPartition: 19 sensor=MessagesReadPerPartition: 0 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 
sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 ConsumerPath=shared/user1: sensor=BytesReadPerPartition: 693 sensor=MessageLagByCommittedPerPartition: 20 sensor=MessageLagByLastReadPerPartition: 19 sensor=MessagesReadPerPartition: 17 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 monitoring_project_id=foo: Account=asdfgs: TopicPath=asdfgs/topic: OriginDC=Dc1: Partition=0: ConsumerPath=shared/user1: sensor=BytesReadPerPartition: 106 sensor=MessageLagByCommittedPerPartition: 30 sensor=MessageLagByLastReadPerPartition: 29 sensor=MessagesReadPerPartition: 2 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 Partition=1: ConsumerPath=shared/user1: sensor=BytesReadPerPartition: 693 sensor=MessageLagByCommittedPerPartition: 20 sensor=MessageLagByLastReadPerPartition: 19 sensor=MessagesReadPerPartition: 17 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 Sensor: MessageLagByCommittedPerPartition {"":[30,20],"foo":[30,20]} Sensor: BytesReadPerPartition {"":[106,693],"foo":[106,693]} Sensor: MessagesReadPerPartition {"":[2,17],"foo":[2,17]} 2026-02-19T08:29:11.031076Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:29:11.037164Z node 9 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 27 actor [9:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 27 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 25 } Consumers { Name: "user1" Generation: 25 Important: false MetricsLevel: 2 MonitoringProjectId: "foo" } MetricsLevel: 3 XXXXX after disable: host=cluster: Account=asdfgs: TopicPath=asdfgs/topic: OriginDC=Dc1: Partition=0: sensor=BytesWrittenPerPartition: 540 sensor=MessagesWrittenPerPartition: 30 sensor=SourceIdCount: 3 sensor=TimeSinceLastWriteMs: 0 sensor=WriteTimeLagMsByLastWrite: 000 ConsumerPath=shared/user: sensor=BytesReadPerPartition: 0 sensor=MessageLagByCommittedPerPartition: 30 sensor=MessageLagByLastReadPerPartition: 29 sensor=MessagesReadPerPartition: 0 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 ConsumerPath=shared/user1: sensor=BytesReadPerPartition: 106 sensor=MessageLagByCommittedPerPartition: 30 sensor=MessageLagByLastReadPerPartition: 28 sensor=MessagesReadPerPartition: 2 sensor=ReadTimeLagMsPerPartition: 8 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 Partition=1: 
sensor=BytesWrittenPerPartition: 360 sensor=MessagesWrittenPerPartition: 20 sensor=SourceIdCount: 2 sensor=TimeSinceLastWriteMs: 0 sensor=WriteTimeLagMsByLastWrite: 000 ConsumerPath=shared/user: sensor=BytesReadPerPartition: 0 sensor=MessageLagByCommittedPerPartition: 20 sensor=MessageLagByLastReadPerPartition: 19 sensor=MessagesReadPerPartition: 0 sensor=ReadTimeLagMsPerPartition: 0 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 ConsumerPath=shared/user1: sensor=BytesReadPerPartition: 693 sensor=MessageLagByCommittedPerPartition: 20 sensor=MessageLagByLastReadPerPartition: 3 sensor=MessagesReadPerPartition: 17 sensor=ReadTimeLagMsPerPartition: 5 sensor=TimeSinceLastReadMsPerPartition: 000 sensor=WriteTimeLagMsByCommittedPerPartition: 000 sensor=WriteTimeLagMsByLastReadPerPartition: 000 monitoring_project_id=foo: Account=asdfgs: TopicPath=asdfgs/topic: OriginDC=Dc1: Partition=0: ConsumerPath=shared/user1: Partition=1: ConsumerPath=shared/user1: Sensor: MessageLagByCommittedPerPartition {"":[30,20]} Sensor: BytesReadPerPartition {"":[106,693]} Sensor: MessagesReadPerPartition {"":[2,17]} |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table [GOOD] >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameToLongKey >> THealthCheckTest::TestStateStorageBlue [GOOD] >> THealthCheckTest::TestStateStorageRed >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] Test command err: 2026-02-19T08:29:09.003099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490537988170680:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:09.003412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:09.176163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:09.176268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:09.194447Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:09.204754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:09.204833Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:09.206515Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490533693203352:2081] 1771489749001976 != 1771489749001979 2026-02-19T08:29:09.260935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:09.260964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:09.260972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:09.261080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:09.491916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:09.597741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:09.646634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:09.744047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:09.876394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:09.936601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:10.039889Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:11.255540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490546578107114:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.255653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.256002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490546578107124:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.256056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.485096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.506327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.527512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.549159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.571846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.597308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.623771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.659174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.725406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490546578107993:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.725490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.725566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490546578107998:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.725646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490546578108000:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.725690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.728734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:11.738239Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490546578108002:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:11.797589Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490546578108053:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:13.009391Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:13.009559Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Perform request, TraceId.SpanIdPtr: 0x00007C71F3ABD2B8 2026-02-19T08:29:13.009608Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:425: Received compile request, sender: [1:7608490555168042935:2533], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01khtg8xxg6mwsa4xk36420tnv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRiNDQzZWEtZjU3MjA2Ny1mZjUwNzQ4ZS1mOTQ0NTA3Mw==, PoolId: default, IsStreamingQuery: 0} 2026-02-19T08:29:13.009764Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:13.009827Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:523: Added request to queue, sender: [1:7608490555168042935:2533], queueSize: 1 2026-02-19T08:29:13.010377Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:888: Created compile actor, sender: [1:7608490555168042935:2533], compileActor: [1:7608490555168042943:2538] 2026-02-19T08:29:13.010405Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:271: traceId: verbosity = 0, trace_id = 0 2026-02-19T08:29:13.010446Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:278: Start compilation, self: [1:7608490555168042943:2538], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", startTime: 2026-02-19T08:29:13.010383Z 2026-02-19T08:29:13.131483Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:427: [[1:7608490555168042943:2538]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1771489753","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"8971ee6f-f07d97ac-671b262f-52de09c5","version":"1.0"} 2026-02-19T08:29:13.132186Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:618: Compilation successful, self: [1:7608490555168042943:2538], duration: 0.121770s 2026-02-19T08:29:13.132228Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:439: Send response, self: [1:7608490555168042943:2538], owner: [1:7608490546578107073:2387], status: SUCCESS, issues: , uid: 8971ee6f-f07d97ac-671b262f-52de09c5 2026-02-19T08:29:13.132305Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:638: Received response, sender: [1:7608490555168042935:2533], status: SUCCESS, compileActor: [1:7608490555168042943:2538] 2026-02-19T08:29:13.132360Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:911: Send response, sender: [1:7608490555168042935:2533], queryUid: 8971ee6f-f07d97ac-671b262f-52de09c5, status:SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> 
KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] Test command err: 2026-02-19T08:29:09.336196Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490539766349864:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:09.336269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:09.539062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:09.539222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:09.572928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:09.593406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:09.595400Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:09.597023Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490539766349833:2081] 1771489749334719 != 1771489749334722 2026-02-19T08:29:09.668168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:09.668187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:09.668194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:09.668299Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:09.794344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:09.985436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:10.032888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:10.141468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:10.267370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:10.329852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:10.344371Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:11.588362Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490548356286305:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.588495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.588780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490548356286315:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.588851Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.832096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.857831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.882532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.906788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.934601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.965971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.997048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:12.032835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:12.090436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490552651254480:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:12.090495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:12.090555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490552651254485:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:12.090598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490552651254486:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:12.090618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:12.093987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:12.103943Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490552651254489:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:12.175647Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490552651254540:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:13.270624Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:13.270738Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Perform request, TraceId.SpanIdPtr: 0x00007C7ABECCB1D8 2026-02-19T08:29:13.270790Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:425: Received compile request, sender: [1:7608490556946222125:2533], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01khtg8y5n1q0ffqf9m6n44rjs, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MmVlMmIwMWMtMzc1ZTg2MWItNjc5OTQ0OWMtZTUyNWQ3MDE=, PoolId: default, IsStreamingQuery: 0} 2026-02-19T08:29:13.270933Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:13.270987Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:523: Added request to queue, sender: [1:7608490556946222125:2533], queueSize: 1 2026-02-19T08:29:13.271448Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:888: Created compile actor, sender: [1:7608490556946222125:2533], compileActor: [1:7608490556946222133:2538] 2026-02-19T08:29:13.271500Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:271: traceId: verbosity = 0, trace_id = 0 2026-02-19T08:29:13.271555Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:278: Start compilation, self: [1:7608490556946222133:2538], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-02-19T08:29:13.271472Z 2026-02-19T08:29:13.386679Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:427: [[1:7608490556946222133:2538]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1771489753","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"cf2deb31-a9078500-e15661fb-142d5166","version":"1.0"} 2026-02-19T08:29:13.387017Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:618: Compilation successful, self: [1:7608490556946222133:2538], duration: 0.115529s 2026-02-19T08:29:13.387043Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:439: Send response, self: [1:7608490556946222133:2538], owner: [1:7608490548356286263:2386], status: SUCCESS, issues: , uid: cf2deb31-a9078500-e15661fb-142d5166 2026-02-19T08:29:13.387151Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:638: Received response, sender: [1:7608490556946222125:2533], status: SUCCESS, compileActor: [1:7608490556946222133:2538] 2026-02-19T08:29:13.387208Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:911: Send response, sender: [1:7608490556946222125:2533], queryUid: cf2deb31-a9078500-e15661fb-142d5166, status:SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: 
[2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] 2026-02-19T08:29:14.394012Z node 17 :KEYVALUE ERROR: keyvalue_storage_read_request.cpp:256: {KV323@keyvalue_storage_read_request.cpp:256} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2026-02-19T08:29:14.397853Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1006: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2026-02-19T08:29:14.397913Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1925: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 >> KqpCompileFallback::FallbackWithScanQuery [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> DataStreams::TestStreamStorageRetention >> DataStreams::TestNonChargeableUser >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithScanQuery [GOOD] Test command err: 2026-02-19T08:29:10.602954Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490542866480492:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:10.604300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:10.795190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:10.811618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:10.811739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:10.844924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:10.846541Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:10.847445Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490542866480458:2081] 1771489750600467 != 1771489750600470 2026-02-19T08:29:10.911736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:10.911756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:10.911762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:10.911845Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:11.081554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:11.282430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:11.337571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.445369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.575300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.609410Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:11.628508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.063851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490555751384222:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.063968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.064254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490555751384232:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.064301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.272789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.294915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.318229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.342123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.366363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.396775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.426126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.469077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:13.531072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490555751385106:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.531169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.531217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490555751385111:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.531339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490555751385113:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.531380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:13.534260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:13.541985Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490555751385115:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:13.615046Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490555751385166:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:14.801811Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:14.801894Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Perform request, TraceId.SpanIdPtr: 0x00007C8E50ED8F18 2026-02-19T08:29:14.801923Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:425: Received compile request, sender: [1:7608490560046352753:2534], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", keepInCache: 1, split: 0{ TraceId: 01khtg8zng5v8dnrr4mszsghp8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2JhNGUwNjAtYjBiM2E0MTMtMjZhY2VkMmMtMTg3OGMyYWY=, PoolId: default, IsStreamingQuery: 0} 2026-02-19T08:29:14.802015Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:14.802045Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:523: Added request to queue, sender: [1:7608490560046352753:2534], queueSize: 1 2026-02-19T08:29:14.802478Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n 2026-02-19T08:29:14.802541Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:888: Created compile actor, sender: [1:7608490560046352753:2534], compileActor: [1:7608490560046352762:2540] 2026-02-19T08:29:14.802580Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:271: traceId: verbosity = 0, trace_id = 0 2026-02-19T08:29:14.802649Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:278: Start compilation, self: [1:7608490560046352762:2540], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", startTime: 2026-02-19T08:29:14.802548Z 2026-02-19T08:29:14.896758Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:427: [[1:7608490560046352762:2540]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1771489754","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (0, +∞)\"],\"type\":\"Scan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"f21a24f7-29283efb-70bb4e60-2733ad94","version":"1.0"} 2026-02-19T08:29:14.897147Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:618: Compilation successful, self: [1:7608490560046352762:2540], duration: 0.094578s 2026-02-19T08:29:14.897178Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:439: Send response, self: [1:7608490560046352762:2540], owner: [1:7608490555751384180:2386], status: SUCCESS, issues: , uid: f21a24f7-29283efb-70bb4e60-2733ad94 2026-02-19T08:29:14.897287Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:638: Received response, sender: [1:7608490560046352753:2534], status: SUCCESS, compileActor: [1:7608490560046352762:2540] 2026-02-19T08:29:14.897446Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:729: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:14.897517Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:911: Send response, sender: 
[1:7608490560046352753:2534], queryUid: f21a24f7-29283efb-70bb4e60-2733ad94, status:SUCCESS >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> KqpSystemView::QueryStatsSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:94:2057] recipient: [13:93:2122] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:96:2057] recipient: [13:93:2122] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:95:2123] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:211:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:92:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:95:2057] recipient: [15:94:2122] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:97:2057] recipient: [15:94:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:96:2123] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:212:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] |94.6%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestProgram::YqlKernelEquals |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: 2026-02-19T08:28:18.447701Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490322111706082:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.449304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.525625Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490320447644559:2092];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:18.560813Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:18.901736Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:18.901955Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:28:18.902215Z node 2 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:18.929863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:18.991881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:18.991996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:18.993881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:18.993950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:18.995185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:18.995252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:19.051888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.053816Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:28:19.080465Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:28:19.088735Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:19.092406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.092604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:19.230251Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.397570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.399108Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:19.479276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.482387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:28:19.482403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:28:19.482411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-02-19T08:28:19.482511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:19.552062Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:19.571727Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:20.430672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:20.633207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.020330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.442243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:21.620398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:23.453813Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490322111706082:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.453882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:23.521461Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490320447644559:2092];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:23.521515Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:28:23.846470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490343586544752:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.846576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.849969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490343586544762:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:23.850040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:24.269224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.326541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.381613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.442367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.483117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.531909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:24.589463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemes ... 
Executions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:07.860739Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:07.860767Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:07.860785Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:07.860886Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:07.864237Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:07.902950Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:08.367479Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:08.387530Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:08.455106Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:08.561501Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:08.613648Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:08.620880Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:08.620878Z node 17 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:08.625502Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:11.312767Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7608490548832280916:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.312875Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.313144Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7608490548832280925:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.313217Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.392759Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.440109Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.498699Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.559115Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.606631Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.654753Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.709861Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.884278Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:11.969678Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7608490548832282002:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.969786Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.969855Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7608490548832282007:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.970004Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7608490548832282009:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.970046Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:11.973820Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:11.992900Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7608490548832282011:2420], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:29:12.073315Z node 16 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [16:7608490553127249377:4697] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:12.606763Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7608490531652409525:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:12.606840Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:29:12.614938Z node 17 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7608490530810104189:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:12.615046Z node 17 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:29:12.620076Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7608490529847760052:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:12.620157Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:29:14.675162Z node 16 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489754668, txId: 281474976715675] shutting down >> TestProgram::YqlKernelStartsWithScalar >> TestProgram::JsonExistsBinary >> TestProgram::YqlKernelContains >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9FloatTypeE; digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, 
label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> TestProgram::JsonExistsBinary [GOOD] >> TestProgram::YqlKernelContains [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] Test command err: 2026-02-19T08:29:13.324066Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490558048385599:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:13.324187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:13.501157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:13.506495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:13.506596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-02-19T08:29:13.552957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:13.613842Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:13.616479Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490558048385568:2081] 1771489753322730 != 1771489753322733 2026-02-19T08:29:13.662853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:13.662886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:13.662895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:13.662988Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:13.678991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:14.018691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:14.076292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:14.162581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:14.269744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:14.329359Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:14.331820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:15.647371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490566638322035:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:15.647526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:15.647865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490566638322045:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:15.647911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:15.909782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:15.935978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:15.960788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:15.987726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:16.012331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:16.040850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:16.071582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:16.116719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:16.194030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490570933290210:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:16.194097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:16.194147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490570933290215:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:16.194301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490570933290217:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:16.194350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:16.197163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:16.204906Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490570933290219:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:16.278795Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490570933290270:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:17.405762Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:17.405894Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Perform request, TraceId.SpanIdPtr: 0x00007CD2F7F9D5A8 2026-02-19T08:29:17.405951Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:425: Received compile request, sender: [1:7608490575228257858:2534], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01khtg926waxn4t2abp0c1v5s5, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTdjMDFjY2ItZjI0ZmMwZGEtOWZlMmE1ZGItYmE5ZjY5YWM=, PoolId: default, IsStreamingQuery: 0} 2026-02-19T08:29:17.406090Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1257: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 0RuntimeParameterSizeLimitSatisfied: 0}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-02-19T08:29:17.406140Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:523: Added request to queue, sender: [1:7608490575228257858:2534], queueSize: 1 2026-02-19T08:29:17.406556Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:888: Created compile actor, sender: [1:7608490575228257858:2534], compileActor: [1:7608490575228257866:2539] 2026-02-19T08:29:17.406603Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:271: traceId: verbosity = 0, trace_id = 0 2026-02-19T08:29:17.406654Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:278: Start compilation, self: [1:7608490575228257866:2539], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-02-19T08:29:17.406579Z 2026-02-19T08:29:17.538010Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:427: [[1:7608490575228257866:2539]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1771489757","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"7329de4d-b7e6455b-be225438-84a6782c","version":"1.0"} 2026-02-19T08:29:17.538526Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:618: Compilation successful, self: [1:7608490575228257866:2539], duration: 0.131921s 2026-02-19T08:29:17.538569Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:439: Send response, self: [1:7608490575228257866:2539], owner: [1:7608490566638321993:2386], status: SUCCESS, issues: , uid: 7329de4d-b7e6455b-be225438-84a6782c 2026-02-19T08:29:17.538684Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:638: Received response, sender: [1:7608490575228257858:2534], status: SUCCESS, compileActor: [1:7608490575228257866:2539] 2026-02-19T08:29:17.538757Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:911: Send response, sender: [1:7608490575228257858:2534], queryUid: 7329de4d-b7e6455b-be225438-84a6782c, status:SUCCESS |94.7%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; |94.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutEmptyMessage >> TKeyValueTest::TestVacuumOnEmptyTablet [GOOD] >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: 
"O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] >> TestProgram::JsonValueBinary |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> TestProgram::JsonValueBinary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:90:2057] recipient: [8:89:2119] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:92:2057] recipient: [8:89:2119] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:91:2120] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:207:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:91:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:94:2057] recipient: [9:93:2123] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:96:2057] recipient: [9:93:2123] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:95:2124] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:211:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:94:2057] recipient: [10:93:2123] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:96:2057] recipient: [10:93:2123] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:95:2124] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:211:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:96:2057] recipient: [11:95:2125] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:98:2057] recipient: [11:95:2125] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:97:2126] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:213:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:96:2057] recipient: [12:95:2125] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:98:2057] recipient: [12:95:2125] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:97:2126] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:213:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:96:2057] recipient: [13:95:2125] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:98:2057] recipient: [13:95:2125] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:97:2126] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:213:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:98:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:101:2057] recipient: [14:100:2129] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:103:2057] recipient: [14:100:2129] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:102:2130] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:218:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:102:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:105:2057] recipient: [15:104:2133] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:107:2057] recipient: [15:104:2133] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:106:2134] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:222:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:102:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:105:2057] recipient: [16:104:2133] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:107:2057] recipient: [16:104:2133] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:106:2134] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:222:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:104:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:107:2057] recipient: [17:106:2135] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:109:2057] recipient: [17:106:2135] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:108:2136] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:224:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:104:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:107:2057] recipient: [18:106:2135] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:109:2057] recipient: [18:106:2135] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:108:2136] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:224:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls >> TestProgram::NumRowsWithNulls [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\" ... ?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 
7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10DoubleTypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] >> TestProgram::Like |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, 
label="N5(8):{\"a\":true,\"p\":{\"function\":{\"function\":\"NumRows\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N6(8):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N6[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N6->N1->N2->N0->N3->N4->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":6}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]},{"owner_id":6,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"10002","t":"Projection"},"w":8,"id":5},"4":{"p":{"a":true,"p":{"function":{"function":"NumRows","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10002","t":"Calculation"},"w":8,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Filter; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; >> AnalyzeColumnshard::AnalyzeCancel [GOOD] >> TestProgram::YqlKernelEndsWith |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] >> TestProgram::YqlKernelEndsWith [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { 
Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N6(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N4(15):{\"i\":\"7,16\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N10 -> N3[label="1"]; N4[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N7(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N5(23):{\"i\":\"17\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N8(23):{\"i\":\"18\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N9(54):{\"i\":\"19,20\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N10(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N10[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N1->N10->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[{"from":10}]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]},{"owner_id":10,"inputs":[]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"10":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":10},"6":{"p":{"i":"17","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow11BooleanTypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; >> TestProgram::YqlKernel [GOOD] 
|94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeCancel [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:26:03.186255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:03.253393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:03.257272Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:26:03.257499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:26:03.257619Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:26:03.546033Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:03.633362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:03.633442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:03.690164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:03.777927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:26:04.386641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:04.386674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:04.386696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:04.388444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:26:04.388738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:04.449692Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:26:04.941187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:26:07.280258Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:26:07.284293Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:26:07.287046Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:26:07.308264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:26:07.308340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:07.344921Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:26:07.346934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:07.491251Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:07.491354Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:07.492727Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.493351Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.493941Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.494863Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.495001Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.495291Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.495400Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.495533Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.495708Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:26:07.510942Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:07.711178Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:07.728474Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:26:07.728542Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:26:07.763458Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:26:07.763786Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:26:07.763981Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:26:07.764054Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:26:07.764139Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:26:07.764199Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:26:07.764252Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:26:07.764319Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:26:07.764778Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:26:07.773930Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:07.774028Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2141:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:26:07.801601Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2214:2647] 2026-02-19T08:26:07.802057Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2214:2647], schemeshard id = 72075186224037897 2026-02-19T08:26:07.842276Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2263:2669] 2026-02-19T08:26:07.844568Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:26:07.853216Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:26:07.853275Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:26:07.853350Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:26:07.857202Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:26:07.860144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:26:07.865505Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:26:07.865605Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:26:07.874937Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:26:08.080805Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:26:08.194421Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:26:08.404852Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:26:08.534921Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:26:08.534997Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:26:09.221279Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... State, LegacyTraceId: 01khtg44304r58rzbcyaqvevae, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# 2026-02-19T08:27:34.985456Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg44304r58rzbcyaqvevae", SessionId: ydb://session/3?node_id=2&id=YTM1ZGY1LWJhZWRkZTY2LWEzMTc0MjI2LTg0OGQ4ZmMy, Slow query, duration: 59.490122s, status: ABORTED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:27:34.991204Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-02-19T08:27:35.011292Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:27:35.011376Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3170:3439] ... unblocking NKikimr::TEvDataShard::TEvKqpScan from to TX_COLUMNSHARD_ACTOR 2026-02-19T08:27:35.130467Z node 2 :TX_COLUMNSHARD_SCAN WARN: actor.cpp:199: Scan [2:4355:3880] undelivered event: 271646932 txId: 281474976720661 scanId: 1 gen: 1 tablet: 72075186224037899 reason: ActorUnknown description: init failed 2026-02-19T08:27:35.132748Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4363:3888] 2026-02-19T08:27:35.133512Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3170:3439] , Record { OperationId: "operationId2" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-02-19T08:27:35.133583Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: "operationId2", DatabaseName: `', Types: 1 2026-02-19T08:27:35.133635Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. 
Create new force traversal table, OperationId: "operationId2", PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-02-19T08:27:35.149889Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:112: [72075186224037894] TTxAnalyze::Complete 2026-02-19T08:27:35.150113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:27:35.150294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:697: [72075186224037894] ScheduleNextAnalyze. operationId: "operationId2", started analyzing table: [OwnerId: 72075186224037897, LocalPathId: 34], AnalyzeActorId: [2:4366:3891] 2026-02-19T08:27:35.150878Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4368:3893], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:27:35.154570Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4368:3893], RunStreamQuery with text: SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table` 2026-02-19T08:27:35.154726Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4368:3893], Start read next stream part 2026-02-19T08:27:35.200276Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4384:3908]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:27:35.200584Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-02-19T08:27:35.200639Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 2, ReplyToActorId = [2:4384:3908], StatRequests.size() = 1 2026-02-19T08:28:41.407849Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4368:3893], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:28:41.408143Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4368:3893], Start read next stream part 2026-02-19T08:28:41.408371Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg5ybjc3npcazq2p6r8xr2", SessionId: ydb://session/3?node_id=2&id=MjZkYTQ0NDYtNWRiNDdlZmYtNjFiM2FiMzgtYWVhMWQ2N2Y=, Slow query, duration: 66.249757s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:28:41.409893Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4477:3993], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:28:41.412854Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-02-19T08:28:41.413369Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4477:3993], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:28:41.413493Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4477:3993], Start read next stream part 2026-02-19T08:28:41.416838Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4368:3893], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:28:41.416911Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4368:3893], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2MyOGJhZGItM2QxYTk3YzktYzkzOTAzYmQtNWY5NDVhYg==, TxId: 2026-02-19T08:28:41.532281Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4496:4008]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:41.532612Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:28:41.532674Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:4496:4008], StatRequests.size() = 1 2026-02-19T08:29:20.208214Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4477:3993], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:29:20.208331Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4477:3993], Start read next stream part 2026-02-19T08:29:20.208490Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg7z252wpqc837z5y6krxa", SessionId: ydb://session/3?node_id=2&id=ZjU0MTFjY2MtYTIyOWU3MTEtNWQ5MDExMWItNTM2ZjU0OWM=, Slow query, duration: 38.790193s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:29:20.210731Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4585:4091], ActorId: [2:4586:4092], Starting query actor #1 [2:4587:4093] 2026-02-19T08:29:20.210789Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4586:4092], ActorId: [2:4587:4093], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:29:20.212962Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4477:3993], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:29:20.213004Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4366:3891], ActorId: [2:4477:3993], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDJkYjA4N2UtOGZhYmNlMGItZDc5MjBlMzAtNzNkYTM0YTQ=, TxId: 2026-02-19T08:29:20.213336Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32170, txId: 18446744073709551615] shutting down 2026-02-19T08:29:20.214029Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4586:4092], ActorId: [2:4587:4093], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZGI0ZDQzNWMtNzEyZDRhMDQtZjU0NDkxYTMtZjc4NmU2MTc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:29:20.273153Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4603:4107]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:29:20.273377Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:29:20.273422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 4, ReplyToActorId = [2:4603:4107], StatRequests.size() = 1 2026-02-19T08:29:20.442052Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4586:4092], ActorId: [2:4587:4093], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGI0ZDQzNWMtNzEyZDRhMDQtZjU0NDkxYTMtZjc4NmU2MTc=, TxId: 2026-02-19T08:29:20.442152Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4586:4092], ActorId: [2:4587:4093], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGI0ZDQzNWMtNzEyZDRhMDQtZjU0NDkxYTMtZjc4NmU2MTc=, TxId: 2026-02-19T08:29:20.442609Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4585:4091], ActorId: [2:4586:4092], Got response [2:4587:4093] SUCCESS 2026-02-19T08:29:20.443003Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:29:20.468171Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:29:20.468260Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId2", ActorId=[1:3170:3439] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"3,4\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TestProgram::YqlKernelStartsWith [GOOD] >> TestProgram::JsonExists >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue >> TestProgram::SimpleFunction [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query [GOOD] >> TestProgram::YqlKernelEndsWithScalar >> TestProgram::JsonValue [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; >> TestProgram::YqlKernelEndsWithScalar [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, 
label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign 
{ Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000 ... 
04\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10DoubleTypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9UInt8TypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"a\":true,\"i\":\"2\",\"p\":{\"function\":{\"function\":\"Count\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"a":true,"i":"2","p":{"function":{"function":"Count","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table >> DataStreams::TestListStreamConsumers [GOOD] >> 
DataStreams::TestListShards1Shard |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT >> TestProgram::CountUIDByVAT [GOOD] >> DataStreams::TestShardPagination [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"2,4\",\"p\":{\"options\":[\"{10001(Count):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[4]},\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N4->N2->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"a":true,"i":"2,4","p":{"options":["{10001(Count):[2]}"],"type":"AGGREGATION","keys":[4]},"o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:530;T=N5arrow9Int32TypeE; >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 3256132137878018931 2026-02-19T08:29:27.646425Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.646490Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.646523Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.646548Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.646568Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.646592Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.646622Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.647360Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.647412Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.647442Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.647471Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.647498Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.647530Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.647560Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.647630Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: 
PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.647664Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.647701Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.647753Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.647776Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.647804Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.647828Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2026-02-19T08:29:27.649030Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.649089Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.649121Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.649159Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.649197Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.649227Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2026-02-19T08:29:27.649271Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2026-02-19T08:29:15.927305Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490565008005438:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:15.927364Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:16.119111Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:16.146026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:16.146167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:16.188071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:16.247878Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:16.290258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:16.307175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:16.307198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:16.307206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:16.307334Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:16.505461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:16.550821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:16.682783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72)
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2026-02-19T08:29:16.935442Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:16.968177Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490569302974818:3521] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:19.398533Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490581513114032:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:19.398570Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:19.410308Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:19.484380Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:19.513809Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:19.513871Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:19.523960Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:19.564342Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:19.564362Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:19.564366Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:19.564417Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:19.692308Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:19.707970Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:19.745794Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:19.882943Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:29:20.403652Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:23.995402Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490597285218901:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:23.995450Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:24.009475Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:24.091308Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:24.113204Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:24.113303Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:24.146224Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:24.198124Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:24.198153Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:24.198161Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:24.198233Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:24.310373Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:24.423353Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:24.473462Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:24.679345Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:29:25.000977Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |94.8%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ColumnShardTiers::DSConfigs |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ColumnShardTiers::DSConfigsWithQueryServiceDdl |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:203:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:86:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:89:2057] recipient: [16:88:2118] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:91:2057] recipient: [16:88:2118] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:90:2119] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:206:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:86:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:89:2057] recipient: [17:88:2118] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:91:2057] recipient: [17:88:2118] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:90:2119] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:206:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:87:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:90:2057] recipient: [18:89:2118] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:92:2057] recipient: [18:89:2118] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:91:2119] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:207:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:90:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:93:2057] recipient: [19:92:2121] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:95:2057] recipient: [19:92:2121] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:94:2122] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:210:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:90:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:93:2057] recipient: [20:92:2121] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:95:2057] recipient: [20:92:2121] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! 
new actor is[20:94:2122] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:210:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:91:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:94:2057] recipient: [21:93:2121] Leader for TabletID 72057594037927937 is [21:95:2122] sender: [21:96:2057] recipient: [21:93:2121] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestReadRequestInFlightLimit >> ColumnShardTiers::TTLUsage >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:84:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2119] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:91:2057] recipient: [9:88:2119] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2120] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2119] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:91:2057] recipient: [10:88:2119] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2120] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2121] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:93:2057] recipient: [11:90:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2122] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2121] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:93:2057] recipient: [12:90:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2122] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:90:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:92:2123] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:95:2057] recipient: [13:92:2123] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:94:2124] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:210:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:90:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:93:2057] recipient: [14:92:2123] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:95:2057] recipient: [14:92:2123] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:94:2124] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:210:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2123] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:96:2057] recipient: [15:93:2123] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:95:2124] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:93:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:96:2057] recipient: [16:95:2125] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:98:2057] recipient: [16:95:2125] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:97:2126] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:213:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:93:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:96:2057] recipient: [17:95:2125] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:98:2057] recipient: [17:95:2125] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:97:2126] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:213:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:94:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:97:2057] recipient: [18:96:2125] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:99:2057] recipient: [18:96:2125] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:98:2126] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:214:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] >> ColumnShardTiers::DSConfigsStub |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] >> TKeyValueTest::TestRenameToLongKey [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is 
[2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:89:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:88:2117] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:108:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:109:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:78:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:81:2057] recipient: [15:80:2112] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:83:2057] recipient: [15:80:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:82:2113] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:198:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:79:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:81:2112] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:84:2057] recipient: [17:81:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:83:2113] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:199:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:85:2057] recipient: [18:84:2115] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:87:2057] recipient: [18:84:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:86:2116] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:202:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:83:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:86:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:88:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:87:2116] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:203:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2118] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:91:2057] recipient: [21:88:2118] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:90:2119] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:206:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:87:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:90:2057] recipient: [23:89:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:89:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:207:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic >> S3SettingsConversion::Basic [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2026-02-19T08:29:16.174142Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490568818092926:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:16.174225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:16.333335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:16.356015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:16.356096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:16.384288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:16.466027Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:16.524915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:16.524938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:29:16.524946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:16.525049Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:16.563791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:16.721762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:16.823353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:16.964482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:29:17.180534Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:17.186614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { 
sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2026-02-19T08:29:17.249540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:29:17.312562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-02-19T08:29:17.365271Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-02-19T08:29:17.365316Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-02-19T08:29:17.365331Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:29:17.371668Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-02-19T08:29:17.371761Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-02-19T08:29:17.371801Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1771489757098-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1771489757,"finish":1771489757},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1771489757}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1771489757279-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1771489757,"finish":1771489757},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1771489757}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1771489757278-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1771489757,"finish":1771489757},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1771489757}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1771489757098-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1771489757,"finish":1771489757},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1771489757}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1771489757279-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1771489757,"finish":1771489757},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1771489757}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1771489757278-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1771489757,"finish":1771489757},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1771489757}' 2026-02-19T08:29:19.919588Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608490581570138343:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:19.919636Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:19.931886Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:20.007446Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:20.026076Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:20.026 ... 
00:1771489764.483446 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.484342 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.491566 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.491703 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.496619 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.496800 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.517471 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.517598 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-02-19T08:29:24.532339Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:29:24.566870Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1771489764.604455 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.604558 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-02-19T08:29:24.613432Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1771489764.647647 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.647748 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, 
MaxResults, AlreadyRead, StreamArn 2026-02-19T08:29:24.655710Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1771489764.698656 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.698776 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.709957 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.710076 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-02-19T08:29:24.742852Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-02-19T08:29:24.758201Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2026-02-19T08:29:24.758229Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-02-19T08:29:24.758239Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2026-02-19T08:29:24.758249Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2026-02-19T08:29:24.758258Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-02-19T08:29:24.758269Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-02-19T08:29:24.767058Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2026-02-19T08:29:24.767109Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-02-19T08:29:24.767129Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-02-19T08:29:24.767174Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2026-02-19T08:29:24.767193Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-02-19T08:29:24.767219Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 
THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found E0000 00:00:1771489764.774190 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489764.774324 525111 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-02-19T08:29:24.846098Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:27.716116Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7608490616849072582:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:27.716185Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:27.729765Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:27.813018Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:27.830183Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:27.830259Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:27.836306Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:27.884212Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:27.884259Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:27.884266Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:27.884346Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:27.917664Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:28.081135Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:28.133191Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:28.294626Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) E0000 00:00:1771489768.382381 526116 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489768.389867 526116 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489768.395704 526116 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489768.401687 526116 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1771489768.407897 526116 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event 
NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:78:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:81:2057] recipient: [13:80:2112] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:83:2057] recipient: [13:80:2112] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:82:2113] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:198:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:78:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:81:2057] recipient: [14:80:2112] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:83:2057] recipient: [14:80:2112] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:82:2113] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:198:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:79:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:82:2057] recipient: [15:81:2112] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:84:2057] recipient: [15:81:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:83:2113] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:199:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:82:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:85:2057] recipient: [16:84:2115] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:87:2057] recipient: [16:84:2115] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:86:2116] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:202:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:85:2057] recipient: [17:84:2115] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:87:2057] recipient: [17:84:2115] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:86:2116] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:202:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:83:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:86:2057] recipient: [18:85:2115] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:88:2057] recipient: [18:85:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:87:2116] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:105:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:88:2057] recipient: [19:87:2117] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:90:2057] recipient: [19:87:2117] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! 
new actor is[19:89:2118] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:205:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:88:2057] recipient: [20:87:2117] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:90:2057] recipient: [20:87:2117] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:89:2118] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:205:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2117] Leader for TabletID 72057594037927937 is [21:90:2118] sender: [21:91:2057] recipient: [21:88:2117] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! 
new actor is[21:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 11669745920304820836 |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 4287612612564412039 2026-02-19T08:29:28.928743Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2026-02-19T08:29:28.930706Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13786775874381441966] 2026-02-19T08:29:28.954300Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks |94.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> TKeyValueTracingTest::ReadSmall >> KqpNamedExpressions::NamedExpressionSimple-UseSink >> TKeyValueTracingTest::ReadHuge |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge >> TKeyValueTracingTest::WriteSmall >> KqpSqlIn::SimpleKey >> KqpNamedExpressions::NamedExpressionRandomInsertDataQuery-UseSink >> KqpSqlIn::KeyTypeMissmatch_Int ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:105:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:85:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:88:2057] recipient: [22:87:2117] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:90:2057] recipient: [22:87:2117] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
new actor is[22:89:2118] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:205:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:88:2057] recipient: [23:87:2117] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:90:2057] recipient: [23:87:2117] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:89:2118] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:205:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:89:2057] recipient: [24:88:2117] Leader for TabletID 72057594037927937 is [24:90:2118] sender: [24:91:2057] recipient: [24:88:2117] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! 
new actor is[24:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:54:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:54:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] >> KqpAgg::ScalarAggregationResult >> KqpNewEngine::DeleteOn-UseSink >> KqpNewEngine::PureExpr >> KqpSqlIn::TableSource >> KqpNewEngine::ContainerRegistryCombiner >> KqpReturning::ReturningWorks+QueryService >> KqpSort::ReverseOptimized >> TKeyValueTracingTest::WriteHuge [GOOD] >> TKeyValueTracingTest::WriteSmall [GOOD] >> TKeyValueTracingTest::ReadSmall [GOOD] >> TKeyValueTracingTest::ReadHuge [GOOD] >> KqpNewEngine::PkSelect1 >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query |94.9%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:54:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:52:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! 
new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:54:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:52:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:203:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:86:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:89:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:91:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:90:2119] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:206:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:87:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:90:2057] recipient: [24:89:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:89:2118] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! 
new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:207:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:54:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:54:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table [GOOD] >> KqpMergeCn::TopSortBy_Utf8_Limit2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:108:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] ... r refreshed! new actor is[22:86:2116] Leader for TabletID 72057594037927937 is [22:86:2116] sender: [22:202:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:82:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:84:2115] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:87:2057] recipient: [23:84:2115] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:86:2116] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:202:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:83:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:85:2115] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:88:2057] recipient: [24:85:2115] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:87:2116] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:203:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:54:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:54:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:86:2057] recipient: [25:39:2086] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:89:2057] recipient: [25:88:2118] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:91:2057] recipient: [25:88:2118] !Reboot 72057594037927937 (actor [25:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [25:58:2099]) tablet resolver refreshed! new actor is[25:90:2119] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:206:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:52:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:59:2057] recipient: [26:52:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:76:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:86:2057] recipient: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:89:2057] recipient: [26:88:2118] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:91:2057] recipient: [26:88:2118] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:90:2119] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:206:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:54:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:54:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:87:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:90:2057] recipient: [27:89:2118] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:92:2057] recipient: [27:89:2118] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! new actor is[27:91:2119] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:207:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:52:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:52:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:90:2057] recipient: [28:39:2086] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:93:2057] recipient: [28:92:2121] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:95:2057] recipient: [28:92:2121] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! 
new actor is[28:94:2122] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:210:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:90:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:93:2057] recipient: [29:92:2121] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:95:2057] recipient: [29:92:2121] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:94:2122] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:210:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:54:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:54:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:91:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:94:2057] recipient: [30:93:2121] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:96:2057] recipient: [30:93:2121] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:95:2122] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:211:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:93:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:96:2057] recipient: [31:95:2123] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:98:2057] recipient: [31:95:2123] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! 
new actor is[31:97:2124] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:213:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:54:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:54:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:93:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! 
new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:214:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061]
>> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns
>> KqpSqlIn::KeySuffix
>> KqpNamedExpressions::NamedExpressionSimple+UseSink
>> TxUsage::WriteToTopic_Demo_21_RestartNo_Table
>> KqpSort::TopSortParameter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD]
Test command err: 2026-02-19T08:28:34.204513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:34.204576Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:34.205396Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:34.218445Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:34.218787Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:34.219042Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:34.265138Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:34.272617Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:34.272812Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:34.274424Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:34.274495Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:34.274554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:34.274942Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:34.275022Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:34.275111Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:34.354470Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:34.384709Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:34.384915Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:34.385016Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:34.385064Z node 1 :TX_DATASHARD DEBUG:
datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:34.385103Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:34.385139Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:34.385332Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:34.385380Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:34.385742Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:34.385845Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:34.385945Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:34.385994Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:34.386041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:34.386078Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:34.386125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:34.386157Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:34.386212Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:34.386328Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:34.386374Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:34.386416Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:34.392886Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:34.392980Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:34.393072Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:34.393260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:34.393328Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 
ssId 4200 seqNo 0:0 2026-02-19T08:28:34.393393Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:34.393438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:34.393473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:34.393506Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:34.393538Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:34.393850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:34.393890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:34.393924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:34.393972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:34.394028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:34.394056Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:34.394087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:34.394117Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:34.394154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:34.407305Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:34.407383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:34.407432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:34.407478Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:34.407564Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:34.408044Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:34.408098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:34.408141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:34.408286Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient 
[1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:34.408319Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:34.408468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:34.408510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:34.408541Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:34.408576Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:34.412392Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:34.412460Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:34.412697Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:34.412738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:34.412793Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:34.412849Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:34.412886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:34.412927Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:34.412965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
351:2317]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-02-19T08:29:40.448457Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.448480Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-02-19T08:29:40.448530Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-02-19T08:29:40.448550Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.448571Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-02-19T08:29:40.448618Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-02-19T08:29:40.448638Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.448659Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-02-19T08:29:40.448711Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-02-19T08:29:40.448748Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.448773Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-02-19T08:29:40.448840Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-02-19T08:29:40.448862Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.448883Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-02-19T08:29:40.448958Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-02-19T08:29:40.448981Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449003Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-02-19T08:29:40.449039Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], 
Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-02-19T08:29:40.449058Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449079Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-02-19T08:29:40.449146Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-02-19T08:29:40.449173Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449202Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-02-19T08:29:40.449269Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-02-19T08:29:40.449295Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449318Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-02-19T08:29:40.449375Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-02-19T08:29:40.449394Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449418Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-02-19T08:29:40.449475Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-02-19T08:29:40.449494Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449512Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-02-19T08:29:40.449563Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-02-19T08:29:40.449583Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449602Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-02-19T08:29:40.449658Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender 
[32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-02-19T08:29:40.449678Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449698Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-02-19T08:29:40.449762Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-02-19T08:29:40.449787Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449806Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-02-19T08:29:40.449861Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-02-19T08:29:40.449882Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.449902Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-02-19T08:29:40.449962Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-02-19T08:29:40.449981Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.450002Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-02-19T08:29:40.450062Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-02-19T08:29:40.450087Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.450107Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-02-19T08:29:40.450170Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-02-19T08:29:40.450189Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.450211Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-02-19T08:29:40.450266Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 
269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-02-19T08:29:40.450287Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.450305Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-02-19T08:29:40.450350Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-02-19T08:29:40.450382Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:40.450403Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 30 31 28 19 27 29 31 31 29 25 25 20 30 28 29 30 28 22 17 22 28 28 25 30 25 25 22 20 - - 18 - actual 30 31 28 19 27 29 31 31 29 25 25 20 30 28 29 30 28 22 17 22 28 28 25 30 25 25 22 20 - - 18 - interm 3 0 5 5 5 3 4 4 5 4 4 4 2 1 3 4 5 - 3 - 3 5 5 5 5 1 4 4 - - - -
>> KqpNewEngine::ContainerRegistryCombiner [GOOD]
>> KqpNewEngine::BlindWrite
>> KqpNewEngine::PureExpr [GOOD]
>> KqpNewEngine::PureTxMixedWithDeferred
>> KqpNamedExpressions::NamedExpressionSimple-UseSink [GOOD]
>> KqpNewEngine::BatchUpload
>> KqpNewEngine::DeleteOn-UseSink [GOOD]
>> KqpNewEngine::DeleteWithBuiltin+UseSink
>> KqpHashCombineReplacement::DqHashCombineTest-UseDqHashCombine-UseDqHashAggregate
>> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD]
>> KqpSort::ReverseOptimized [GOOD]
>> KqpSort::ReverseOptimizedWithPredicate
>> KqpNewEngine::PkSelect1 [GOOD]
>> KqpNewEngine::PkSelect2
>> KqpNewEngine::PkRangeSelect1
>> TKeyValueTest::TestConcatWorksNewApi [GOOD]
>> TKeyValueTest::TestConcatToLongKey
>> KqpSqlIn::KeyTypeMissmatch_Int [GOOD]
>> KqpSqlIn::KeyTypeMissmatch_Str
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD]
Test command err: 2026-02-19T08:28:37.114344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:37.114404Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:37.115177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:37.127527Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:37.127872Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:37.128101Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:37.171854Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:37.178161Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:37.178316Z node 1 :TX_DATASHARD DEBUG:
datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:37.179745Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:37.179812Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:37.179854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:37.180263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:37.180337Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:37.180418Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:37.243612Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:37.272589Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:37.272800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:37.272896Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:37.272949Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:37.272981Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:37.273009Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:37.273167Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:37.273209Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:37.273523Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:37.273622Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:37.273754Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:37.273788Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:37.273836Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:37.273871Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:37.273914Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:37.273944Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:37.273983Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:37.274077Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:37.274115Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:37.274165Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:37.280678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:37.280757Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:37.280865Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:37.281037Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:37.281100Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:37.281165Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:37.281208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:37.281239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:37.281268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:37.281298Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:37.281582Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:37.281625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:37.281667Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:37.281701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:37.281773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:37.281801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:37.281829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:37.281867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:37.281894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:37.294421Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:37.294501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:37.294579Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:37.294636Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:37.294720Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:37.295358Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:37.295418Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:37.295484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:37.295632Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:37.295665Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:37.295837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:37.295884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:37.295940Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:37.295983Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:37.300509Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:37.300583Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:37.300833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:37.300879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:37.300959Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:37.301004Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:37.301046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:37.301088Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in 
PlanQueue unit at 9437184 2026-02-19T08:28:37.301126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 1:2317]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-02-19T08:29:43.236636Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.236665Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-02-19T08:29:43.236746Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-02-19T08:29:43.236778Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.236808Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-02-19T08:29:43.236898Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-02-19T08:29:43.236930Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.236961Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-02-19T08:29:43.237048Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-02-19T08:29:43.237081Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.237115Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-02-19T08:29:43.237209Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-02-19T08:29:43.237241Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.237271Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-02-19T08:29:43.237360Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-02-19T08:29:43.237440Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.237468Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 
2026-02-19T08:29:43.237559Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-02-19T08:29:43.237590Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.237622Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-02-19T08:29:43.237706Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-02-19T08:29:43.237755Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.237785Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-02-19T08:29:43.237870Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-02-19T08:29:43.237902Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.237931Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-02-19T08:29:43.238013Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-02-19T08:29:43.238064Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.238102Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-02-19T08:29:43.238203Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-02-19T08:29:43.238237Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.238269Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-02-19T08:29:43.238363Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-02-19T08:29:43.238397Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.238426Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 
9437184 txId 28 2026-02-19T08:29:43.238511Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-02-19T08:29:43.238543Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.238574Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-02-19T08:29:43.238660Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-02-19T08:29:43.238690Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.238720Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-02-19T08:29:43.238801Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-02-19T08:29:43.238837Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.238864Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-02-19T08:29:43.238945Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-02-19T08:29:43.238981Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.239010Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-02-19T08:29:43.239096Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-02-19T08:29:43.239125Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.239157Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-02-19T08:29:43.239243Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-02-19T08:29:43.239274Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.239304Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 
consumer 9437184 txId 34 2026-02-19T08:29:43.239389Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-02-19T08:29:43.239422Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.239450Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-02-19T08:29:43.239532Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:240:2231], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-02-19T08:29:43.239561Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:43.239593Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 28 31 31 31 29 28 30 30 27 24 27 29 27 30 31 14 29 31 19 30 11 31 19 12 31 17 23 31 23 31 2 - actual 28 31 31 31 29 28 30 30 27 24 27 29 27 30 31 14 29 31 19 30 11 31 19 12 31 17 23 31 23 31 2 - interm 4 6 2 1 5 0 6 3 6 - 4 - - 5 5 - 6 6 - - - 2 - 2 - - - - - - 2 -
>> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns [GOOD]
>> KqpNotNullColumns::InsertFromSelect
>> KqpSqlIn::SimpleKey [GOOD]
>> KqpSqlIn::SelectNotAllElements
>> KqpSqlIn::TableSource [GOOD]
>> KqpSqlIn::SimpleKey_Negated
>> KqpReturning::ReturningWorks+QueryService [GOOD]
>> KqpReturning::ReturningWorks-QueryService
>> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD]
>> TKeyValueTest::TestLargeWriteAndDelete
>> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService
>> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD]
>> TKeyValueTest::TestGetStatusWorks
>> KqpNamedExpressions::NamedExpressionSimple+UseSink [GOOD]
>> KqpNamedExpressions::NamedExpressionRandomUpsertReturning-UseSink-UseDataQuery
>> TKeyValueTest::TestVacuumWithMockDisk [GOOD]
>> KqpSort::TopSortParameter [GOOD]
>> KqpSort::TopSortExprPk
>> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD]
>> KqpNewEngine::BlindWrite [GOOD]
>> KqpNewEngine::BlindWriteParameters
>> KqpMergeCn::TopSortBy_Utf8_Limit2 [GOOD]
>> KqpMergeCn::TopSortBy_Timestamp_Limit2
>> KqpNewEngine::PureTxMixedWithDeferred [GOOD]
>> KqpNewEngine::PrunePartitionsByLiteral
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumWithMockDisk [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is
[2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2026-02-19T08:29:03.682224Z node 3 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:56:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:61:2057] recipient: [3:56:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:78:2057] recipient: [3:17:2064] 2026-02-19T08:29:03.973893Z node 4 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:54:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:61:2057] recipient: [4:54:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:78:2057] recipient: [4:17:2064] !Reboot 72057594037927937 (actor [4:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:80:2057] recipient: [4:42:2089] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:83:2057] recipient: [4:82:2114] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:85:2057] recipient: [4:82:2114] !Reboot 72057594037927937 (actor [4:60:2101]) rebooted! !Reboot 72057594037927937 (actor [4:60:2101]) tablet resolver refreshed! new actor is[4:84:2115] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:200:2057] recipient: [4:17:2064] 2026-02-19T08:29:05.867751Z node 5 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:55:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:61:2057] recipient: [5:55:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:78:2057] recipient: [5:17:2064] !Reboot 72057594037927937 (actor [5:60:2101]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:80:2057] recipient: [5:42:2089] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:83:2057] recipient: [5:82:2114] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:85:2057] recipient: [5:82:2114] !Reboot 72057594037927937 (actor [5:60:2101]) rebooted! !Reboot 72057594037927937 (actor [5:60:2101]) tablet resolver refreshed! 
new actor is[5:84:2115] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:200:2057] recipient: [5:17:2064] 2026-02-19T08:29:07.861655Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:56:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:61:2057] recipient: [6:56:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:78:2057] recipient: [6:17:2064] !Reboot 72057594037927937 (actor [6:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:81:2057] recipient: [6:42:2089] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:84:2057] recipient: [6:83:2114] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:86:2057] recipient: [6:83:2114] !Reboot 72057594037927937 (actor [6:60:2101]) rebooted! !Reboot 72057594037927937 (actor [6:60:2101]) tablet resolver refreshed! new actor is[6:85:2115] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:201:2057] recipient: [6:17:2064] 2026-02-19T08:29:09.740229Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:55:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:61:2057] recipient: [7:55:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:78:2057] recipient: [7:17:2064] !Reboot 72057594037927937 (actor [7:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:84:2057] recipient: [7:42:2089] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:60:2101]) rebooted! !Reboot 72057594037927937 (actor [7:60:2101]) tablet resolver refreshed! new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:17:2064] 2026-02-19T08:29:11.840494Z node 8 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:56:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:61:2057] recipient: [8:56:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:78:2057] recipient: [8:17:2064] !Reboot 72057594037927937 (actor [8:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:84:2057] recipient: [8:42:2089] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:60:2101]) rebooted! !Reboot 72057594037927937 (actor [8:60:2101]) tablet resolver refreshed! 
new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:17:2064] 2026-02-19T08:29:13.941561Z node 9 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:54:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:61:2057] recipient: [9:54:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:78:2057] recipient: [9:17:2064] !Reboot 72057594037927937 (actor [9:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:85:2057] recipient: [9:42:2089] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:60:2101]) rebooted! !Reboot 72057594037927937 (actor [9:60:2101]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:17:2064] 2026-02-19T08:29:15.830285Z node 10 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:55:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:61:2057] recipient: [10:55:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:78:2057] recipient: [10:17:2064] !Reboot 72057594037927937 (actor [10:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:88:2057] recipient: [10:42:2089] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:60:2101]) rebooted! !Reboot 72057594037927937 (actor [10:60:2101]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:17:2064] 2026-02-19T08:29:17.736225Z node 11 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:54:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:61:2057] recipient: [11:54:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:78:2057] recipient: [11:17:2064] !Reboot 72057594037927937 (actor [11:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:88:2057] recipient: [11:42:2089] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:60:2101]) rebooted! 
!Reboot 72057594037927937 (actor [11:60:2101]) tablet resolver refreshed! new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:17:2064] 2026-02-19T08:29:19.819410Z node 12 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:55:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:61:2057] recipient: [12:55:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:78:2057] recipient: [12:17:2064] !Reboot 72057594037927937 (actor [12:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:89:2057] recipient: [12:42:2089] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:60:2101]) rebooted! !Reboot 72057594037927937 (actor [12:60:2101]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:212 ... pient: [18:42:2089] Leader for TabletID 72057594037927937 is [18:60:2101] sender: [18:100:2057] recipient: [18:99:2126] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:102:2057] recipient: [18:99:2126] !Reboot 72057594037927937 (actor [18:60:2101]) rebooted! !Reboot 72057594037927937 (actor [18:60:2101]) tablet resolver refreshed! new actor is[18:101:2127] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:217:2057] recipient: [18:17:2064] 2026-02-19T08:29:33.315184Z node 19 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:55:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:61:2057] recipient: [19:55:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:78:2057] recipient: [19:17:2064] !Reboot 72057594037927937 (actor [19:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:100:2057] recipient: [19:42:2089] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:103:2057] recipient: [19:102:2129] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:105:2057] recipient: [19:102:2129] !Reboot 72057594037927937 (actor [19:60:2101]) rebooted! !Reboot 72057594037927937 (actor [19:60:2101]) tablet resolver refreshed! 
new actor is[19:104:2130] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:220:2057] recipient: [19:17:2064] 2026-02-19T08:29:35.191252Z node 20 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:56:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:61:2057] recipient: [20:56:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:78:2057] recipient: [20:17:2064] !Reboot 72057594037927937 (actor [20:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:100:2057] recipient: [20:42:2089] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:103:2057] recipient: [20:102:2129] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:105:2057] recipient: [20:102:2129] !Reboot 72057594037927937 (actor [20:60:2101]) rebooted! !Reboot 72057594037927937 (actor [20:60:2101]) tablet resolver refreshed! new actor is[20:104:2130] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:220:2057] recipient: [20:17:2064] 2026-02-19T08:29:37.091201Z node 21 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:54:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:61:2057] recipient: [21:54:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:78:2057] recipient: [21:17:2064] !Reboot 72057594037927937 (actor [21:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:101:2057] recipient: [21:42:2089] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:104:2057] recipient: [21:103:2129] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:106:2057] recipient: [21:103:2129] !Reboot 72057594037927937 (actor [21:60:2101]) rebooted! !Reboot 72057594037927937 (actor [21:60:2101]) tablet resolver refreshed! new actor is[21:105:2130] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:221:2057] recipient: [21:17:2064] 2026-02-19T08:29:38.997731Z node 22 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:55:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:61:2057] recipient: [22:55:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:78:2057] recipient: [22:17:2064] !Reboot 72057594037927937 (actor [22:60:2101]) on event NKikimr::TEvKeyValue::TEvCollect ! 
Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:102:2057] recipient: [22:42:2089] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:105:2057] recipient: [22:104:2130] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:107:2057] recipient: [22:104:2130] !Reboot 72057594037927937 (actor [22:60:2101]) rebooted! !Reboot 72057594037927937 (actor [22:60:2101]) tablet resolver refreshed! new actor is[22:106:2131] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:126:2057] recipient: [22:17:2064] 2026-02-19T08:29:39.263770Z node 23 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:61:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:78:2057] recipient: [23:17:2064] !Reboot 72057594037927937 (actor [23:60:2101]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:103:2057] recipient: [23:42:2089] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:106:2057] recipient: [23:105:2131] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:108:2057] recipient: [23:105:2131] !Reboot 72057594037927937 (actor [23:60:2101]) rebooted! !Reboot 72057594037927937 (actor [23:60:2101]) tablet resolver refreshed! new actor is[23:107:2132] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:127:2057] recipient: [23:17:2064] 2026-02-19T08:29:39.532315Z node 24 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:55:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:61:2057] recipient: [24:55:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:78:2057] recipient: [24:17:2064] !Reboot 72057594037927937 (actor [24:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:106:2057] recipient: [24:42:2089] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:109:2057] recipient: [24:108:2134] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:111:2057] recipient: [24:108:2134] !Reboot 72057594037927937 (actor [24:60:2101]) rebooted! !Reboot 72057594037927937 (actor [24:60:2101]) tablet resolver refreshed! 
new actor is[24:110:2135] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:226:2057] recipient: [24:17:2064] 2026-02-19T08:29:41.450226Z node 25 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:56:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:61:2057] recipient: [25:56:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:78:2057] recipient: [25:17:2064] !Reboot 72057594037927937 (actor [25:60:2101]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:106:2057] recipient: [25:42:2089] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:109:2057] recipient: [25:108:2134] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:111:2057] recipient: [25:108:2134] !Reboot 72057594037927937 (actor [25:60:2101]) rebooted! !Reboot 72057594037927937 (actor [25:60:2101]) tablet resolver refreshed! new actor is[25:110:2135] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:226:2057] recipient: [25:17:2064] 2026-02-19T08:29:43.345359Z node 26 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:54:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:61:2057] recipient: [26:54:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:78:2057] recipient: [26:17:2064] !Reboot 72057594037927937 (actor [26:60:2101]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:106:2057] recipient: [26:42:2089] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:109:2057] recipient: [26:108:2134] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:111:2057] recipient: [26:108:2134] !Reboot 72057594037927937 (actor [26:60:2101]) rebooted! !Reboot 72057594037927937 (actor [26:60:2101]) tablet resolver refreshed! new actor is[26:110:2135] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:226:2057] recipient: [26:17:2064] 2026-02-19T08:29:45.451956Z node 27 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:56:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:61:2057] recipient: [27:56:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:78:2057] recipient: [27:17:2064] !Reboot 72057594037927937 (actor [27:60:2101]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! 
Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:111:2057] recipient: [27:42:2089] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:114:2057] recipient: [27:113:2138] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:116:2057] recipient: [27:113:2138] !Reboot 72057594037927937 (actor [27:60:2101]) rebooted! !Reboot 72057594037927937 (actor [27:60:2101]) tablet resolver refreshed! new actor is[27:115:2139] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:231:2057] recipient: [27:17:2064] 2026-02-19T08:29:47.542998Z node 28 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:54:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:61:2057] recipient: [28:54:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:78:2057] recipient: [28:17:2064] >> KqpNotNullColumns::InsertFromSelect [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumnPg >> KqpNewEngine::BatchUpload [GOOD] >> KqpNewEngine::Aggregate >> KqpNewEngine::DeleteWithBuiltin+UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2026-02-19T08:28:37.873287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:37.873340Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:37.874159Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:37.884183Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:37.884534Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:37.884795Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:37.922709Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:37.929463Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:37.929664Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:37.931065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:37.931123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:37.931185Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:37.931535Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:37.931599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:37.931663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id 
[1:204:2156] in generation 2 2026-02-19T08:28:37.997543Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:38.022880Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:38.023051Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:38.023127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:38.023170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:38.023198Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:38.023224Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:38.023350Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.023392Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.023623Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:38.023700Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:38.023778Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:38.023808Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:38.023846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:38.023881Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:38.023919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:38.023947Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:38.023981Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:38.024052Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:38.024084Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:38.024132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:38.030360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 
9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:38.030445Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:38.030529Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:38.030684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:38.030734Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:38.030793Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:38.030862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:38.030903Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:38.030940Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:38.030977Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:38.031269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:38.031333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:38.031374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:38.031412Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:38.031470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:38.031505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:38.031540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:38.031574Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:38.031600Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:38.043598Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:38.043665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:38.043707Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:38.043744Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:38.043812Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state 
WaitScheme 2026-02-19T08:28:38.044279Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:38.044339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:38.044395Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:38.044535Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:38.044568Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:38.044710Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:38.044747Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:38.044788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:38.044836Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:38.048034Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:38.048091Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:38.048261Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.048300Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.048343Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:38.048383Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:38.048410Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:38.048460Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:38.048507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
1:2317]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-02-19T08:29:47.635073Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.635103Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-02-19T08:29:47.635189Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-02-19T08:29:47.635220Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.635250Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-02-19T08:29:47.635334Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-02-19T08:29:47.635367Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.635395Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-02-19T08:29:47.635481Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-02-19T08:29:47.635515Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.635545Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-02-19T08:29:47.635627Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-02-19T08:29:47.635704Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.635735Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-02-19T08:29:47.635822Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-02-19T08:29:47.635854Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.635885Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-02-19T08:29:47.635970Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], 
Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-02-19T08:29:47.636010Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.636038Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-02-19T08:29:47.636135Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-02-19T08:29:47.636171Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.636204Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-02-19T08:29:47.636310Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-02-19T08:29:47.636343Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.636373Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-02-19T08:29:47.636435Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-02-19T08:29:47.636464Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.636496Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-02-19T08:29:47.636576Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-02-19T08:29:47.636609Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.636639Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-02-19T08:29:47.636765Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-02-19T08:29:47.636800Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.636831Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-02-19T08:29:47.636929Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender 
[32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-02-19T08:29:47.636976Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.637007Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-02-19T08:29:47.637066Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-02-19T08:29:47.637097Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.637136Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-02-19T08:29:47.637223Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-02-19T08:29:47.637261Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.637291Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-02-19T08:29:47.637376Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-02-19T08:29:47.637411Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.637440Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-02-19T08:29:47.637522Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-02-19T08:29:47.637554Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.637585Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-02-19T08:29:47.637669Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-02-19T08:29:47.637702Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.637750Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-02-19T08:29:47.637836Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 
269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-02-19T08:29:47.637921Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.637953Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-02-19T08:29:47.638036Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-02-19T08:29:47.638069Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:47.638096Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 24 29 22 28 18 22 29 27 29 21 13 26 31 21 21 21 24 19 29 29 29 17 24 29 12 29 29 21 12 12 9 - actual 24 29 22 28 18 22 29 27 29 21 13 26 31 21 21 21 24 19 29 29 29 17 24 29 12 29 29 21 12 12 9 - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> KqpAgg::ScalarAggregationResult [GOOD] >> KqpExtractPredicateLookup::OverflowLookup >> KqpSort::ReverseOptimizedWithPredicate [GOOD] >> KqpSort::ReverseRangeOptimized >> KqpNewEngine::PkSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect2 >> KqpSqlIn::KeySuffix [GOOD] >> KqpSqlIn::KeySuffix_OnlyTail >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> KqpNewEngine::PkRangeSelect1 [GOOD] >> KqpNewEngine::OnlineRO_Consistent >> KqpSqlIn::KeyTypeMissmatch_Str [GOOD] >> KqpSqlIn::SecondaryIndex_PgKey >> KqpNewEngine::MultiSelect >> KqpHashCombineReplacement::DqHashCombineTest-UseDqHashCombine-UseDqHashAggregate [GOOD] >> KqpHashCombineReplacement::DqHashCombineTest+UseDqHashCombine-UseDqHashAggregate >> THealthCheckTest::TestStateStorageRed [GOOD] >> KqpNotNullColumns::InsertNotNullPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2026-02-19T08:28:38.725376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:38.725438Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:38.726320Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:38.739427Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:38.739760Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:38.739999Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:38.784066Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:38.791640Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:38.791824Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:38.793653Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:38.793751Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:38.793809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:38.794308Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:38.794392Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:38.794481Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:38.862691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:38.889354Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:38.889526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:38.889612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:38.889654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:38.889685Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:38.889727Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:38.889876Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.889932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.890266Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:38.890362Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:38.890461Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:38.890504Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:38.890577Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:38.890617Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:38.890662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:38.890696Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:38.890743Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:38.890847Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:38.890898Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:38.890954Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:38.896063Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:38.896144Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:38.896223Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:38.896359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:38.896410Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:38.896463Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:38.896510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:38.896537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:38.896567Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:38.896604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:38.896861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:38.896904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:38.896948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:38.896980Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:38.897043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:38.897071Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:38.897104Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:38.897151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:38.897184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:38.909764Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:38.909850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:38.909928Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:38.909974Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:38.910061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:38.910623Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:38.910693Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:38.910747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:38.910877Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:38.910901Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:38.911052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:38.911100Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:38.911137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:38.911178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:38.915056Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:38.915135Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:38.915346Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.915387Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:38.915493Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:38.915538Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:38.915574Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:38.915612Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in 
PlanQueue unit at 9437184 2026-02-19T08:28:38.915648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... [32:351:2317]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-02-19T08:29:50.077921Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.077948Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-02-19T08:29:50.078031Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-02-19T08:29:50.078071Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.078100Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-02-19T08:29:50.078180Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-02-19T08:29:50.078212Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.078239Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-02-19T08:29:50.078341Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-02-19T08:29:50.078382Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.078410Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-02-19T08:29:50.078513Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-02-19T08:29:50.078546Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.078575Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-02-19T08:29:50.078657Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-02-19T08:29:50.078688Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.078713Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 
2026-02-19T08:29:50.078790Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-02-19T08:29:50.078820Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.078847Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-02-19T08:29:50.078944Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-02-19T08:29:50.078982Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.079010Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-02-19T08:29:50.079118Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-02-19T08:29:50.079148Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.079178Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-02-19T08:29:50.079234Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-02-19T08:29:50.079264Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.079292Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-02-19T08:29:50.079374Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-02-19T08:29:50.079405Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.079433Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-02-19T08:29:50.079532Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-02-19T08:29:50.079569Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.079596Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 
9437184 txId 28 2026-02-19T08:29:50.079714Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-02-19T08:29:50.079744Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.079772Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-02-19T08:29:50.079827Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-02-19T08:29:50.079854Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.079880Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-02-19T08:29:50.079967Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-02-19T08:29:50.080001Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.080028Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-02-19T08:29:50.080123Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-02-19T08:29:50.080156Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.080184Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-02-19T08:29:50.080265Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-02-19T08:29:50.080295Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.080324Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-02-19T08:29:50.080404Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-02-19T08:29:50.080444Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.080471Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 
consumer 9437184 txId 34 2026-02-19T08:29:50.080587Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-02-19T08:29:50.080624Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.080652Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-02-19T08:29:50.080778Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [32:809:2734], Recipient [32:351:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-02-19T08:29:50.080812Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:29:50.080841Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 29 26 28 27 27 30 31 24 24 31 24 25 31 30 24 31 24 22 31 24 19 30 24 22 24 24 - 15 - - - - actual 29 26 28 27 27 30 31 24 24 31 24 25 31 30 24 31 24 22 31 24 19 30 24 22 24 24 - 15 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query [GOOD] >> KqpSqlIn::SelectNotAllElements [GOOD] >> KqpSqlIn::SimpleKey_In_And_In >> KqpSqlIn::SimpleKey_Negated [GOOD] >> KqpSqlIn::TupleParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:93:2057] recipient: [26:92:2121] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:95:2057] recipient: [26:92:2121] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:94:2122] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:210:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:54:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:54:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:91:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:94:2057] recipient: [27:93:2121] Leader for TabletID 72057594037927937 is [27:95:2122] sender: [27:96:2057] recipient: [27:93:2121] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! 
new actor is[27:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:52:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:52:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:54:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:54:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:78:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:81:2057] recipient: [30:80:2112] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:83:2057] recipient: [30:80:2112] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:82:2113] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:198:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:78:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:81:2057] recipient: [31:80:2112] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:83:2057] recipient: [31:80:2112] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:82:2113] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:198:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:54:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:54:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:79:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:81:2112] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:84:2057] recipient: [32:81:2112] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:83:2113] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:199:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:82:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:85:2057] recipient: [34:84:2115] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:87:2057] recipient: [34:84:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! !Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:86:2116] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:202:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:52:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:52:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:83:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:86:2057] recipient: [35:85:2115] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:88:2057] recipient: [35:85:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:87:2116] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:203:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2118] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:91:2057] recipient: [36:88:2118] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:90:2119] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2118] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:91:2057] recipient: [37:88:2118] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:90:2119] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:206:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:87:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:90:2057] recipient: [38:89:2118] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:92:2057] recipient: [38:89:2118] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:91:2119] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:207:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageRed [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:27:06.424277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:06.516935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:06.523211Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:06.523617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:06.523806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:06.937789Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:07.047279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:07.047421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:07.110099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:07.215775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:08.104182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:08.104244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:08.104277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:08.104752Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... blocking NKikimr::NSchemeShard::TEvSchemeShard::TEvDescribeScheme from MONITORING_REQUEST to FLAT_SCHEMESHARD_ACTOR cookie 1 2026-02-19T08:27:08.167047Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:17.421815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:27:17.421897Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded ... waiting for SysViewsRoster update finished 2026-02-19T08:27:27.952937Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:27.953105Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:27.964795Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:27.967051Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:27.969031Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:860:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:27.969478Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:27.969890Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:27.970411Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:855:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:27.970888Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:27.971073Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:28.257861Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:28.361524Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:28.361641Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:28.362090Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:28.362152Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:28.406013Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:27:28.406638Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:28.406942Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:28.462292Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:27:28.476351Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:32.480203Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:32.487906Z node 3 :HIVE DEBUG: hive_impl.cpp:768: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 3: Status: 2 2026-02-19T08:27:32.488056Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(3)::Execute 2026-02-19T08:27:32.488113Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-02-19T08:27:32.488221Z node 3 :HIVE DEBUG: tx__status.cpp:66: HIVE#72057594037968897 THive::TTxStatus(3)::Complete 2026-02-19T08:27:32.491803Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:32.491878Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:32.491920Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:32.492972Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:32.493441Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(3)::Execute 2026-02-19T08:27:32.493578Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:27:32.493625Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(3) DeregisterInDomains (72057594046644480:1) : 2 -> 1 2026-02-19T08:27:32.493682Z node 3 :HIVE DEBUG: hive_impl.cpp:2895: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(1, 3) 2026-02-19T08:27:32.493746Z node 3 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [3:1241:2643] 2026-02-19T08:27:32.493799Z node 3 :HIVE DEBUG: hive_impl.cpp:133: HIVE#72057594037968897 TryToDeleteNode(3): waiting 3600.000000s 2026-02-19T08:27:32.494856Z node 3 :HIVE TRACE: hive_impl.cpp:150: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([3:890:2423]) [3:1241:2643] 2026-02-19T08:27:32.495999Z node 3 :HIVE TRACE: hive_impl.cpp:142: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([3:1722:3005]) [3:1956:3009] 2026-02-19T08:27:32.496112Z node 3 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2026-02-19T08:27:32.513000Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2026-02-19T08:27:32.513121Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-02-19T08:27:32.513405Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2026-02-19T08:27:32.513486Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 
2026-02-19T08:27:32.513685Z node 3 :HIVE DEBUG: hive_impl.cpp:2908: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2026-02-19T08:27:32.513771Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-02-19T08:27:32.513916Z node 3 :HIVE DEBUG: hive_impl.cpp:1112: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-02-19T08:27:32.514885Z node 3 :HIVE DEBUG: hive_impl.cpp:768: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 4: Status: 2 2026-02-19T08:27:32.514967Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(4)::Execute 2026-02-19T08:27:32.515003Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2026-02-19T08:27:32.515383Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(4)::Execute 2026-02-19T08:27:32.515474Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-02-19T08:27:32.515508Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(4) DeregisterInDomain ... for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:29:43.974090Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:29:43.974207Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:29:44.327809Z node 29 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:44.534544Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.534701Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.535683Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.535776Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.536095Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.536190Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.536514Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.536596Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.536875Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.536963Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-02-19T08:29:44.537289Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.537369Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.537667Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.537760Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.538069Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.538151Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.538443Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.538513Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.645186Z node 29 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 30 Cookie 30 2026-02-19T08:29:44.645726Z node 29 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 31 Cookie 31 2026-02-19T08:29:44.645949Z node 29 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 32 Cookie 32 2026-02-19T08:29:44.646086Z node 29 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 33 Cookie 33 2026-02-19T08:29:44.646255Z node 29 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 34 Cookie 34 2026-02-19T08:29:44.646426Z node 29 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 35 Cookie 35 2026-02-19T08:29:44.646551Z node 29 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 36 Cookie 36 2026-02-19T08:29:44.646737Z node 29 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 37 Cookie 37 2026-02-19T08:29:44.647161Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.648101Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.648401Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.648627Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.648863Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.649213Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.649462Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.649670Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.649896Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.720075Z node 32 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:44.736832Z node 35 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:44.799024Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:44.888912Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:44.926604Z node 30 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:44.986472Z node 33 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:45.001452Z node 29 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:45.032719Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:45.050755Z node 36 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:29:46.029087Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:46.029148Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:46.029177Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:46.029702Z node 29 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:46.115056Z node 30 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.115192Z node 29 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.115268Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.115346Z node 32 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.115452Z node 33 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.115539Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.115615Z node 35 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.115712Z node 36 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.116133Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "RED-7831" status: RED message: "There is not enough functional rings" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-29" type: "STATE_STORAGE_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-b30b-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-568c" status: RED message: "There is not enough functional rings" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-29" type: "SCHEME_BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-106b-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-18c3" status: RED message: "There is not enough functional rings" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-29" type: "BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-0632-1-29" status: 
RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "BOARD_NODE" level: 3 listed: 6 count: 6 } location { id: 29 host: "::1" port: 12001 } >> KqpSort::TopSortExprPk [GOOD] >> KqpSort::TopSortTableExpr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:40.876651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:40.982759Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:40.990706Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:40.991032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:40.991228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:41.308728Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:41.391098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:41.391216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:41.450848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:41.548761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:42.201401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:42.201882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:42.201926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:42.201961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:42.202487Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:42.266266Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:42.782174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:45.591882Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:45.599481Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:45.603180Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:45.633037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:45.633144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:45.682087Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:45.684070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:45.857201Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:45.857306Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:45.858796Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.859468Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.860158Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.860903Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.861204Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.861326Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.861549Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.861700Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.862000Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:45.877939Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.053755Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.096625Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:46.096716Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:46.130031Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:46.130237Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:46.130436Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:46.130480Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:46.130515Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:46.130561Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:46.130599Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:46.130632Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:46.130982Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:46.135117Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.135200Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.155486Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:46.156263Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:46.215906Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:46.218567Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:46.227637Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:46.227683Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:46.227741Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:46.233642Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:46.237566Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:46.245163Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:46.245298Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:46.259616Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:46.461279Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:46.532674Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:46.841892Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:46.963303Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:46.963416Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:47.557746Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... 83:3995] SUCCESS 2026-02-19T08:28:00.600628Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:28:00.601962Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3180:3445] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-02-19T08:28:00.602048Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: "operationId", DatabaseName: `', Types: 1 2026-02-19T08:28:00.602118Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: "operationId", PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-02-19T08:28:00.650173Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:28:00.650287Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:56: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId "operationId" , ActorId=[1:3180:3445] 2026-02-19T08:28:00.650354Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:112: [72075186224037894] TTxAnalyze::Complete 2026-02-19T08:28:00.650700Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:28:00.650936Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:697: [72075186224037894] ScheduleNextAnalyze. operationId: "operationId", started analyzing table: [OwnerId: 72075186224037897, LocalPathId: 34], AnalyzeActorId: [2:4517:4019] 2026-02-19T08:28:00.651741Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4519:4021], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:28:00.656387Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4519:4021], RunStreamQuery with text: SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table` 2026-02-19T08:28:00.656578Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4519:4021], Start read next stream part 2026-02-19T08:28:00.722722Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4530:4031]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:00.723030Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:28:00.723101Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 4, ReplyToActorId = [2:4530:4031], StatRequests.size() = 1 2026-02-19T08:29:04.592507Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4519:4021], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:29:04.592675Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4519:4021], Start read next stream part 2026-02-19T08:29:04.592809Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg6q8g13543wjesmzpsg7x", SessionId: ydb://session/3?node_id=2&id=Mjc2ZDVjYmUtMjgyMWUzYjEtNWZmYmYzNTItNmM2NmQ2ZWY=, Slow query, duration: 63.920975s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:29:04.593869Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4622:4115], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:29:04.594073Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4519:4021], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:29:04.594115Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4519:4021], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTEyZTM1YzQtMWYxNjMzODItMzkyOGY2ZmEtMzY3OTljMjE=, TxId: 2026-02-19T08:29:04.594209Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32170, txId: 18446744073709551615] shutting down 2026-02-19T08:29:04.596736Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4622:4115], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:29:04.596816Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4622:4115], Start read next stream part 2026-02-19T08:29:04.641384Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4640:4130]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:29:04.641612Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-02-19T08:29:04.641641Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 5, ReplyToActorId = [2:4640:4130], StatRequests.size() = 1 2026-02-19T08:29:45.589274Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4622:4115], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:29:45.589404Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4622:4115], Start read next stream part 2026-02-19T08:29:45.589594Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg8npm29v2tkasxt90ggmn", SessionId: ydb://session/3?node_id=2&id=N2RkYjFlMDQtMmE5YzM1MjQtYTQ2NjkwYzAtNmRhOTI4MTU=, Slow query, duration: 40.990190s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:29:45.592163Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4622:4115], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:29:45.592226Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4517:4019], ActorId: [2:4622:4115], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=MjY2MmNiMDQtZmQxMDY3NGMtNjIyZmY3ZTUtMmQzNjM4OTM=, TxId: 2026-02-19T08:29:45.592322Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32170, txId: 18446744073709551615] shutting down 2026-02-19T08:29:45.592594Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4729:4213], ActorId: [2:4731:4215], Starting query actor #1 [2:4732:4216] 2026-02-19T08:29:45.592656Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4731:4215], ActorId: [2:4732:4216], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:29:45.597163Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4731:4215], ActorId: [2:4732:4216], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NDdmMTFjMWItNjBkOTM5MWUtNDEwZjc0MzItYzlhMjcyZDE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:29:45.639370Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4731:4215], ActorId: [2:4732:4216], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDdmMTFjMWItNjBkOTM5MWUtNDEwZjc0MzItYzlhMjcyZDE=, TxId: 2026-02-19T08:29:45.639441Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4731:4215], ActorId: [2:4732:4216], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDdmMTFjMWItNjBkOTM5MWUtNDEwZjc0MzItYzlhMjcyZDE=, TxId: 2026-02-19T08:29:45.639799Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4729:4213], ActorId: [2:4731:4215], Got response [2:4732:4216] SUCCESS 2026-02-19T08:29:45.640149Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:29:45.669148Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:29:45.669246Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3180:3445] 2026-02-19T08:29:45.728672Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:29:45.728777Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:29:45.798062Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:4258:3789], schemeshard count = 1 2026-02-19T08:29:50.950551Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-02-19T08:29:50.950800Z node 2 :STATISTICS DEBUG: service_impl.cpp:874: EvPropagateStatistics, node id: 2 cookie: 1 2026-02-19T08:29:50.950918Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-02-19T08:29:50.961931Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8677: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-02-19T08:29:50.962031Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8514: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-02-19T08:29:50.962352Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-02-19T08:29:50.976680Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete >> KqpNewEngine::LocksSingleShard >> KqpReturning::ReturningWorks-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete+QueryService |94.9%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNewEngine::BlindWriteParameters [GOOD] >> KqpNewEngine::BlindWriteListParameter >> KqpNewEngine::PrunePartitionsByLiteral [GOOD] >> KqpNewEngine::PrunePartitionsByExpr >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert+QueryService >> KqpMergeCn::TopSortBy_Timestamp_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp64_Limit2 >> KqpNotNullColumns::AlterAddNotNullColumnPg [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn >> KqpNamedExpressions::NamedExpressionRandomChanged-UseSink >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table [GOOD] >> KqpNewEngine::DeleteWithBuiltin-UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink >> KqpNewEngine::Aggregate [GOOD] >> KqpNewEngine::AggregateTuple >> KqpNewEngine::PkRangeSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect3 >> KqpRanges::WhereInSubquery >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query >> KqpNotNullColumns::InsertNotNullPk [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg+useSink >> KqpRanges::UpdateWhereInNoFullScan+UseSink >> KqpSort::ReverseRangeOptimized [GOOD] >> KqpSort::ReverseRangeLimitOptimized >> KqpNewEngine::OnlineRO_Consistent [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent >> KqpBatchDelete::SimplePartitions [GOOD] >> KqpBatchDelete::TableNotExists >> KqpNamedExpressions::NamedExpressionRandomInsertDataQuery-UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertIndex-UseSink-UseDataQuery >> KqpNewEngine::MultiSelect [GOOD] >> KqpNewEngine::MultiOutput >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TFlatTest::SplitEmptyTwice [GOOD] >> KqpSqlIn::KeySuffix_OnlyTail [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... r refreshed! new actor is[31:83:2113] Leader for TabletID 72057594037927937 is [31:83:2113] sender: [31:199:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:54:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:54:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:85:2057] recipient: [32:84:2115] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:87:2057] recipient: [32:84:2115] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:86:2116] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:202:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:83:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:86:2057] recipient: [34:85:2115] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:88:2057] recipient: [34:85:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! !Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:87:2116] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:105:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:52:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:52:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:85:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:88:2057] recipient: [36:87:2117] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:90:2057] recipient: [36:87:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! 
new actor is[36:89:2118] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:205:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2117] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:91:2057] recipient: [37:88:2117] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:90:2118] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:108:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:88:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:91:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:93:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:92:2120] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:208:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:88:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:91:2057] recipient: [39:90:2119] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:93:2057] recipient: [39:90:2119] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! 
new actor is[39:92:2120] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:208:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:89:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:92:2057] recipient: [40:91:2119] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:94:2057] recipient: [40:91:2119] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:93:2120] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:209:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:92:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:95:2057] recipient: [41:94:2122] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:97:2057] recipient: [41:94:2122] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:96:2123] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:212:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:92:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:95:2057] recipient: [42:94:2122] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:97:2057] recipient: [42:94:2122] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! 
new actor is[42:96:2123] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:212:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] >> KqpHashCombineReplacement::DqHashCombineTest+UseDqHashCombine-UseDqHashAggregate [GOOD] >> KqpHashCombineReplacement::DqHashCombineTest-UseDqHashCombine+UseDqHashAggregate >> KqpNotNullColumns::AlterDropNotNullColumn [GOOD] >> KqpNotNullColumns::FailedMultiEffects >> TKeyValueTest::TestGetStatusWorks [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg-useSink >> KqpNewEngine::LocksSingleShard [GOOD] >> KqpNewEngine::LocksMultiShard >> KqpSqlIn::SecondaryIndex_PgKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 1:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:93:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! new actor is[28:92:2120] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:208:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:88:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:91:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:93:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:92:2120] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:208:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:54:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:54:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:89:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:92:2057] recipient: [30:91:2119] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:94:2057] recipient: [30:91:2119] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:93:2120] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:209:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:92:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:95:2057] recipient: [31:94:2122] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:97:2057] recipient: [31:94:2122] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:96:2123] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:212:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:54:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:54:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:92:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:95:2057] recipient: [32:94:2122] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:97:2057] recipient: [32:94:2122] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:96:2123] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:212:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:52:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:52:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:78:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:81:2057] recipient: [35:80:2112] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:83:2057] recipient: [35:80:2112] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! 
new actor is[35:82:2113] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:198:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:79:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:82:2057] recipient: [37:81:2112] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:84:2057] recipient: [37:81:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:83:2113] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:199:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:81:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:84:2057] recipient: [38:83:2114] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:86:2057] recipient: [38:83:2114] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:85:2115] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:201:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:84:2057] recipient: [39:83:2114] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:86:2057] recipient: [39:83:2114] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:85:2115] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:201:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2114] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:87:2057] recipient: [40:84:2114] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:86:2115] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] >> KqpRanges::IsNullPartial >> KqpSort::TopSortTableExpr [GOOD] >> KqpSort::TopSortTableExprOffset >> KqpBatchUpdate::ManyPartitions_1 [GOOD] >> KqpNewEngine::PrunePartitionsByExpr [GOOD] >> KqpNewEngine::PruneWritePartitions+UseSink >> KqpNewEngine::BlindWriteListParameter [GOOD] >> KqpNewEngine::BrokenLocksAtROTx >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> KqpRanges::WhereInSubquery [GOOD] >> KqpRanges::UpdateWhereInWithNull >> KqpMergeCn::TopSortBy_Timestamp64_Limit2 [GOOD] >> KqpNamedExpressions::NamedExpressionChanged+UseSink >> KqpSqlIn::SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink >> KqpNamedExpressions::NamedExpressionRandomChanged-UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomChanged2+UseSink >> TKeyValueTest::TestConcatToLongKey [GOOD] >> KqpSqlIn::TupleParameter [GOOD] >> KqpSqlIn::TupleLiteral >> KqpNewEngine::ReadAfterWrite >> KqpNewEngine::PkRangeSelect3 [GOOD] >> KqpNewEngine::PkRangeSelect4 >> KqpNewEngine::OnlineRO_Inconsistent [GOOD] >> KqpNewEngine::Nondeterministic >> KqpNewEngine::MultiOutput [GOOD] >> KqpNewEngine::MultiStatement >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 [GOOD] Test command err: 2026-02-19T08:26:08.230711Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489761778798690:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:08.230781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:08.427673Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:08.445078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:08.445172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:08.488763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:08.512840Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:08.513840Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489761778798660:2081] 1771489568229301 != 1771489568229304 2026-02-19T08:26:08.556778Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:08.556799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:08.556808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:08.556895Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:08.682680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:10.531648Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006ad/r3tmp/spilling-tmp-runner/node_1_48323056-92739a04-49385a8d-373d44d4, actor: [1:7608489770368735084:2384] 2026-02-19T08:26:10.531833Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006ad/r3tmp/spilling-tmp-runner 2026-02-19T08:26:12.082491Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082539Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082556Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082571Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082585Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082605Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082626Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082644Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082662Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082713Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082736Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:12.082779Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3c8gfccta23yefgypgnh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWNjYjlmYTAtOWI5MTk4OTgtMmUxMjNkYjEtOTc0Y2RkNDg=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:12.083010Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489778958670747:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg3c8gfccta23yefgypgnh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWNjYjlmYTAtOWI5MTk4OTgtMmUxMjNkYjEtOTc0Y2RkNDg=, PoolId: default, IsStreamingQuery: 0}. 
Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:26:12.083047Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489778958670747:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg3c8gfccta23yefgypgnh, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWNjYjlmYTAtOWI5MTk4OTgtMmUxMjNkYjEtOTc0Y2RkNDg=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:12.083429Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:26:12.083557Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083601Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083639Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083679Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083722Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083772Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083814Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083852Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083886Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:12.083925Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... ws: 19 ComputeCpuTimeUs: 86 BuildCpuTimeUs: 114 WaitOutputTimeUs: 4597 HostName: "ghrun-fkrokuvqke" NodeId: 20 StartTimeMs: 1771489799163 CreateTimeMs: 1771489799158 UpdateTimeMs: 1771489799166 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:59.167798Z node 20 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [20:7608490753190870660:2719] 2026-02-19T08:29:59.167846Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [20:7608490753190870658:2717], CA [20:7608490753190870661:2720], CA [20:7608490753190870657:2716], trace_id# 2026-02-19T08:29:59.167889Z node 20 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [20:7608490753190870658:2717], CA [20:7608490753190870661:2720], CA [20:7608490753190870657:2716], trace_id# 2026-02-19T08:29:59.168087Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [20:7608490753190870657:2716] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1822 DurationUs: 3000 Tasks { TaskId: 1 CpuTimeUs: 1136 FinishTimeMs: 1771489799166 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 30 ReadBytes: 240 AffectedPartitions: 3 } IngressRows: 30 ComputeCpuTimeUs: 93 BuildCpuTimeUs: 1043 WaitOutputTimeUs: 5341 HostName: "ghrun-fkrokuvqke" NodeId: 20 StartTimeMs: 1771489799163 CreateTimeMs: 1771489799157 UpdateTimeMs: 1771489799166 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:59.168129Z node 20 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [20:7608490753190870657:2716] 2026-02-19T08:29:59.168174Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. 
Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [20:7608490753190870658:2717], CA [20:7608490753190870661:2720], trace_id# 2026-02-19T08:29:59.168214Z node 20 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [20:7608490753190870658:2717], CA [20:7608490753190870661:2720], trace_id# 2026-02-19T08:29:59.204620Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [20:7608490753190870658:2717] TaskId# 2 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 1348 Tasks { TaskId: 2 CpuTimeUs: 1016 FinishTimeMs: 1771489799204 IngressRows: 30 ComputeCpuTimeUs: 182 BuildCpuTimeUs: 834 WaitOutputTimeUs: 2989 HostName: "ghrun-fkrokuvqke" NodeId: 20 StartTimeMs: 1771489799204 CreateTimeMs: 1771489799157 UpdateTimeMs: 1771489799204 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:59.204731Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [20:7608490753190870658:2717], CA [20:7608490753190870661:2720], trace_id# 2026-02-19T08:29:59.204816Z node 20 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [20:7608490753190870658:2717], CA [20:7608490753190870661:2720], trace_id# 2026-02-19T08:29:59.205559Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [20:7608490753190870661:2720] TaskId# 5 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 2186 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 343 FinishTimeMs: 1771489799204 InputBytes: 4 ComputeCpuTimeUs: 225 BuildCpuTimeUs: 118 HostName: "ghrun-fkrokuvqke" NodeId: 20 StartTimeMs: 1771489799204 CreateTimeMs: 1771489799158 UpdateTimeMs: 1771489799204 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:59.205645Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. 
Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [20:7608490753190870658:2717], CA [20:7608490753190870661:2720], trace_id# 2026-02-19T08:29:59.205737Z node 20 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [20:7608490753190870658:2717], CA [20:7608490753190870661:2720], trace_id# 2026-02-19T08:29:59.205936Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [20:7608490753190870658:2717] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 44356 Tasks { TaskId: 2 CpuTimeUs: 1023 FinishTimeMs: 1771489799204 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 30 ReadBytes: 240 AffectedPartitions: 3 } IngressRows: 30 ComputeCpuTimeUs: 189 BuildCpuTimeUs: 834 WaitOutputTimeUs: 2989 HostName: "ghrun-fkrokuvqke" NodeId: 20 StartTimeMs: 1771489799204 CreateTimeMs: 1771489799157 UpdateTimeMs: 1771489799204 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:59.206014Z node 20 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [20:7608490753190870658:2717] 2026-02-19T08:29:59.206069Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [20:7608490753190870661:2720], trace_id# 2026-02-19T08:29:59.206124Z node 20 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [20:7608490753190870661:2720], trace_id# 2026-02-19T08:29:59.206278Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [20:7608490753190870661:2720] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 46039 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 349 FinishTimeMs: 1771489799204 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 231 BuildCpuTimeUs: 118 HostName: "ghrun-fkrokuvqke" NodeId: 20 StartTimeMs: 1771489799204 CreateTimeMs: 1771489799158 UpdateTimeMs: 1771489799205 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:59.206319Z node 20 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [20:7608490753190870661:2720] 2026-02-19T08:29:59.206486Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:29:59.206551Z node 20 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:29:59.206611Z node 20 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [20:7608490753190870650:2545] TxId: 281474976710715. Ctx: { TraceId: 01khtgaa52dpa68fn9gpd1hn9n, Database: /Root, SessionId: ydb://session/3?node_id=20&id=YWQ0ZTJkZjktZTBiNmM5ZjgtOTZkYTQ5OTEtM2Q1NmI1NQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.093663s ReadRows: 99 ReadBytes: 792 RequestUnits# 99 ForceFlag# true trace_id# >> KqpBatchDelete::TableNotExists [GOOD] >> KqpReturning::ReturningWorksIndexedInsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert-QueryService >> KqpSort::ReverseRangeLimitOptimized [GOOD] >> KqpSort::TopSortExpr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:52:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! 
new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:52:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:52:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:78:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:81:2057] recipient: [37:80:2112] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:83:2057] recipient: [37:80:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:82:2113] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:198:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:79:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:82:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:84:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:83:2113] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:199:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:82:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:85:2057] recipient: [39:84:2115] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:87:2057] recipient: [39:84:2115] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:86:2116] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:202:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2115] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:87:2057] recipient: [40:84:2115] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:86:2116] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:83:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:86:2057] recipient: [41:85:2115] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:88:2057] recipient: [41:85:2115] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:87:2116] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:203:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:86:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:89:2057] recipient: [42:88:2118] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:91:2057] recipient: [42:88:2118] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:90:2119] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:206:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:86:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:89:2057] recipient: [43:88:2118] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:91:2057] recipient: [43:88:2118] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:90:2119] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:206:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:87:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:90:2057] recipient: [44:89:2118] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:92:2057] recipient: [44:89:2118] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! 
new actor is[44:91:2119] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:207:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:52:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:52:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] >> KqpReturning::ReturningWorksIndexedDelete+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2026-02-19T08:27:30.309511Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490115937002633:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:27:30.311216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:27:30.523982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:27:30.541464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:30.541557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:30.611366Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:30.613668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:30.617834Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490115937002600:2081] 1771489650299551 != 1771489650299554 2026-02-19T08:27:30.783164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:27:30.848448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:30.917834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:27:31.102374Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-02-19T08:27:31.106947Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, 
eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-02-19T08:27:31.131582Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-02-19T08:27:31.135704Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-02-19T08:27:31.312556Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:33.268755Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.78, eph 1} end=Done, 2 blobs 200r (max 200), put Spent{time=0.014s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (58053 0 0)b }, ecr=1.000 2026-02-19T08:27:33.325040Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.105, eph 1} end=Done, 2 blobs 789r (max 789), put Spent{time=0.015s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (51817 0 0)b }, ecr=1.000 2026-02-19T08:27:33.378952Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.589, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-02-19T08:27:33.437238Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.176, eph 2} end=Done, 2 blobs 451r (max 452), put Spent{time=0.011s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (130540 0 0)b }, ecr=1.000 2026-02-19T08:27:33.464351Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.194, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.013s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-02-19T08:27:33.479608Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.195, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.027s,wait=0.027s,interrupts=1} Part{ 1 pk, lobs 0 +0, (208 0 0)b }, ecr=1.000 2026-02-19T08:27:33.481097Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.196, eph 1} end=Done, 2 blobs 501r (max 501), put Spent{time=0.029s,wait=0.022s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31966 0 0)b }, ecr=1.000 2026-02-19T08:27:33.500235Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.197, eph 1} end=Done, 2 blobs 1500r (max 1500), put Spent{time=0.046s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103070 0 0)b }, ecr=1.000 2026-02-19T08:27:33.560897Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.209, eph 2} end=Done, 2 blobs 1557r (max 1560), put Spent{time=0.074s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (102115 0 0)b }, ecr=1.000 2026-02-19T08:27:33.580188Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.730, eph 1} end=Done, 2 blobs 10001r (max 10001), put Spent{time=0.115s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-02-19T08:27:33.600838Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.284, eph 3} end=Done, 2 blobs 702r (max 703), put Spent{time=0.021s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (203072 0 0)b }, ecr=1.000 2026-02-19T08:27:33.666112Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.315, eph 3} end=Done, 2 blobs 2310r (max 2313), put Spent{time=0.022s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (151438 0 0)b }, ecr=1.000 2026-02-19T08:27:33.699140Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1099, eph 2} 
end=Done, 2 blobs 3r (max 5), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-02-19T08:27:33.779559Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.380, eph 4} end=Done, 2 blobs 953r (max 954), put Spent{time=0.033s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (275559 0 0)b }, ecr=1.000 2026-02-19T08:27:33.825486Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.395, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.045s,wait=0.038s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-02-19T08:27:33.826943Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.396, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.046s,wait=0.035s,interrupts=1} Part{ 1 pk, lobs 0 +0, (208 0 0)b }, ecr=1.000 2026-02-19T08:27:33.876558Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.397, eph 2} end=Done, 2 blobs 1002r (max 1002), put Spent{time=0.096s,wait=0.012s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63729 0 0)b }, ecr=1.000 2026-02-19T08:27:33.878819Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.394, eph 2} end=Done, 2 blobs 3003r (max 3003), put Spent{time=0.099s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (206156 0 0)b }, ecr=1.000 2026-02-19T08:27:33.911051Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.409, eph 4} end=Done, 2 blobs 3078r (max 3081), put Spent{time=0.067s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (201736 0 0)b }, ecr=1.000 2026-02-19T08:27:33.939285Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1238, eph 2} end=Done, 2 blobs 10001r (max 10502), put Spent{time=0.148s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-02-19T08:27:33.992696Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.476, eph 5} end=Done, 2 blobs 1205r (max 1206), put Spent{time=0.020s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (348378 0 0)b }, ecr=1.000 2026-02-19T08:27:34.065001Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1605, eph 3} end=Done, 2 blobs 3r (max 5), put Spent{time=0.006s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-02-19T08:27:34.066217Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.506, eph 5} end=Done, 2 blobs 3843r (max 3846), put Spent{time=0.051s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (251839 0 0)b }, ecr=1.000 2026-02-19T08:27:34.145540Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.563, eph 6} end=Done, 2 blobs 1457r (max 1458), put Spent{time=0.036s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (421152 0 0)b }, ecr=1.000 2026-02-19T08:27:34.157354Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.586, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-02-19T08:27:34.158543Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.587, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.014s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (208 0 0)b }, ecr=1.000 2026-02-19T08:27:34.192427Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.588, eph 3} end=Done, 2 blobs 1503r (max 1503), put Spent{time=0.047s,wait=0.008s,interrupts=1} Part{ 1 pk, lobs 0 +0, (95542 0 0)b }, ecr=1.000 2026-02-19T08:27:34.209146Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.584, eph 3} end=Done, 2 blobs 4503r (max 4503), put Spent{time=0.072s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (309038 0 0)b }, ecr=1.000 2026-02-19T08:27:34.241024Z node 1 :OPS_COMPACT INFO: 
Compact{72057594037968897.2.610, eph 6} end=Done, 2 blobs 4617r (max 4620), put Spent{time=0.049s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (302527 0 0)b }, ecr=1.000 2026-02-19T08:27:34.248535Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1744, eph 3} end=Done, 2 blobs 10001r (max 10501), put Spent{time=0.103s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-02-19T08:27:34.308267Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.670, eph 7} end=Done, 2 blobs 1713r (max 1714), put Spent{time=0.031s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (495119 0 0)b }, ecr=1.000 2026-02-19T08:27:34.344661Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.696, eph 7} end=Done, 2 blobs 5370r (max 5373), put Spent{time=0.034s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (351850 0 0)b }, ecr=1.000 2026-02-19T08:27:34.348416Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.2111, eph 4} end=Done, 2 blobs 3r (max 5), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-02-19T08:27:34.435511Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.771, eph 4} end=Done, 2 blobs 2r (max 3), put Spent{time=0.021s,wait=0.012s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-02-19T08:27:34.449435Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.772, eph 4} end=Done, 2 blobs 2r (max 3), put Spent{time=0.034s,wait=0.026s,interrupts=1} Part{ 1 pk, lobs 0 +0, (208 0 0)b }, ecr=1.000 2026-02-19T08:27:34.461980Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.753, eph 8} end=Done, 2 blobs 1964r (max 1965), put Spent{time=0.068s,wait=0.022s,interrupts=1} Part{ 1 pk, lobs 0 +0, (567606 0 0)b }, ecr=1.000 2026-02-19T08:27:34.489082Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.773, eph 4} end=Done, 2 blobs 2004r (max 2004), put Spent{time=0.074s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (127305 0 0)b }, ecr=1.000 2026-02-19T08:27:34.503430Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.768, eph 4} end=Done, 2 blobs 6006r (max 6006), put Spent{t ... 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:53.436364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490723522622690 RawX2: 4503608217307379 } TabletId: 72075186224037889 State: 4 2026-02-19T08:29:53.436479Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:29:53.436758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490727817590467 RawX2: 4503608217307456 } TabletId: 72075186224037893 State: 4 2026-02-19T08:29:53.436808Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:29:53.437006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490727817590469 RawX2: 4503608217307457 } TabletId: 72075186224037895 State: 4 2026-02-19T08:29:53.437078Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:29:53.437251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490727817590291 RawX2: 4503608217307438 } TabletId: 72075186224037891 State: 4 2026-02-19T08:29:53.437326Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:29:53.437542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490727817590288 RawX2: 4503608217307437 } TabletId: 72075186224037890 State: 4 2026-02-19T08:29:53.437585Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:29:53.437776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6047: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7608490727817590472 RawX2: 4503608217307458 } TabletId: 72075186224037894 State: 4 2026-02-19T08:29:53.437811Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-02-19T08:29:53.438043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:29:53.438115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 
72057594037968897 at ss 72057594046644480 2026-02-19T08:29:53.438273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:29:53.438303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:29:53.438434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:29:53.438475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:29:53.438556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:29:53.438579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:29:53.438676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:29:53.438707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:29:53.438837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-02-19T08:29:53.438868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-02-19T08:29:53.440498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-02-19T08:29:53.440747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-02-19T08:29:53.441401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-02-19T08:29:53.441659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-02-19T08:29:53.441900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2026-02-19T08:29:53.442086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-02-19T08:29:53.442282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-02-19T08:29:53.442536Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:29:53.442796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-02-19T08:29:53.443091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-02-19T08:29:53.443349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6462: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-02-19T08:29:53.443568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-02-19T08:29:53.443839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-02-19T08:29:53.443894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-02-19T08:29:53.443981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-02-19T08:29:53.448128Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-02-19T08:29:53.448197Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2026-02-19T08:29:53.448267Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2026-02-19T08:29:53.448323Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-02-19T08:29:53.450874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:2 2026-02-19T08:29:53.450921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-02-19T08:29:53.451002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:6 2026-02-19T08:29:53.451030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-02-19T08:29:53.451098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:8 2026-02-19T08:29:53.451120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2026-02-19T08:29:53.452010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:4 2026-02-19T08:29:53.452042Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-02-19T08:29:53.452097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:3 2026-02-19T08:29:53.452117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-02-19T08:29:53.452172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Deleted shardIdx 72057594046644480:7 2026-02-19T08:29:53.452225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:188: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-02-19T08:29:53.452344Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-02-19T08:29:53.456045Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-02-19T08:29:53.456112Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found >> KqpNewEngine::AggregateTuple [GOOD] >> KqpNewEngine::AsyncIndexUpdate >> KqpRanges::UpdateWhereInNoFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInNoFullScan-UseSink >> KqpNotNullColumns::FailedMultiEffects [GOOD] >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2026-02-19T08:22:58.905644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:22:58.905738Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:22:59.166854Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:01.665975Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:01.666049Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:01.806191Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:07.063402Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:07.063485Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:07.167765Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:08.875712Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:08.875793Z node 9 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:09.063427Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:10.459321Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:10.459402Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:10.561917Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:11.875814Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:11.875905Z node 11 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:11.991682Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:13.318148Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:13.318274Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:13.438162Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:15.170252Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:15.170315Z node 13 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:15.271112Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:17.036358Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:17.036453Z node 14 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:17.167923Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, 
opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:18.642652Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:18.642734Z node 15 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:18.763291Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:21.494946Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:21.495040Z node 16 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:21.771984Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:24.542410Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:24.542510Z node 17 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:24.655479Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:27.127488Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:27.127594Z node 18 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:27.291355Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:30.663521Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:30.663618Z node 19 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:30.895422Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:34.434018Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:34.434114Z node 20 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:34.588690Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:37.585777Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:37.585866Z node 21 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:37.674875Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:40.772296Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:40.772402Z node 22 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:41.079376Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:43.599960Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:43.600054Z node 23 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:43.697701Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:46.896919Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:46.897029Z node 24 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:47.075987Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:47.730531Z node 24 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1240: Unexpected config sender died for subscription id=1 2026-02-19T08:23:48.452771Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:48.452883Z node 25 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:48.586924Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:50.149640Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:50.149740Z node 26 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:50.270912Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:52.021640Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:23:52.021761Z node 27 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:23:52.115393Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:23:58.928333Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:23:58.928451Z node 27 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:57.732031Z node 27 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2026-02-19T08:27:58.699076Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:58.699185Z node 28 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:58.799689Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:05.754083Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:28:05.754187Z node 28 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:02.057391Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:30:02.057500Z node 29 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:02.139580Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |94.9%| [TA] $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNewEngine::DeferredEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableNotExists [GOOD] Test command err: 2026-02-19T08:26:24.625135Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489830396940198:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:24.625226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:24.816966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:24.834757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:24.834847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:24.886243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:24.888850Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:24.889971Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489830396940167:2081] 1771489584623447 != 1771489584623450 2026-02-19T08:26:24.966921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:24.966953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:24.966987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:24.967112Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:25.072833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:27.484771Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006a2/r3tmp/spilling-tmp-runner/node_1_49091c57-48ac41cf-64049a9a-4620c745, actor: [1:7608489843281843888:2384] 2026-02-19T08:26:27.484938Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006a2/r3tmp/spilling-tmp-runner 2026-02-19T08:26:29.484126Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484191Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484224Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484247Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484270Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: 
StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484303Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484351Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484412Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484437Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484461Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484486Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:29.484529Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3wtxabje4a54vrak2qad, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTAzMTFkNTgtNzZmMTU3YTQtOThjZmIxZmUtNjllNzZkOGE=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:29.484692Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489851871779559:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg3wtxabje4a54vrak2qad, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTAzMTFkNTgtNzZmMTU3YTQtOThjZmIxZmUtNjllNzZkOGE=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:26:29.484728Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489851871779559:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg3wtxabje4a54vrak2qad, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NTAzMTFkNTgtNzZmMTU3YTQtOThjZmIxZmUtNjllNzZkOGE=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:29.485053Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:26:29.485222Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485325Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485406Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485468Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485539Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485605Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485652Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485699Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485775Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:29.485825Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... nId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490739930571769:2763] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 608 DurationUs: 1000 Tasks { TaskId: 2 CpuTimeUs: 144 FinishTimeMs: 1771489796195 OutputBytes: 1 Tables { TablePath: "/Root/ReorderOptionalKey" ReadRows: 4 ReadBytes: 32 AffectedPartitions: 1 } IngressRows: 4 ComputeCpuTimeUs: 55 BuildCpuTimeUs: 89 WaitOutputTimeUs: 1881 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489796194 CreateTimeMs: 1771489796192 UpdateTimeMs: 1771489796195 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:56.196354Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [16:7608490739930571769:2763] 2026-02-19T08:29:56.196390Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [16:7608490739930571768:2762], CA [16:7608490739930571772:2766], CA [16:7608490739930571770:2764], trace_id# 2026-02-19T08:29:56.196418Z node 16 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [16:7608490739930571768:2762], CA [16:7608490739930571772:2766], CA [16:7608490739930571770:2764], trace_id# 2026-02-19T08:29:56.196538Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490739930571770:2764] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 556 DurationUs: 1000 Tasks { TaskId: 3 CpuTimeUs: 243 FinishTimeMs: 1771489796195 OutputBytes: 1 Tables { TablePath: "/Root/ReorderOptionalKey" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 34 BuildCpuTimeUs: 209 WaitOutputTimeUs: 1572 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489796194 CreateTimeMs: 1771489796192 UpdateTimeMs: 1771489796195 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:56.196561Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [16:7608490739930571770:2764] 2026-02-19T08:29:56.196594Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [16:7608490739930571768:2762], CA [16:7608490739930571772:2766], trace_id# 2026-02-19T08:29:56.196620Z node 16 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [16:7608490739930571768:2762], CA [16:7608490739930571772:2766], trace_id# 2026-02-19T08:29:56.196908Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. 
Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490739930571768:2762] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1478 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 934 FinishTimeMs: 1771489796196 OutputBytes: 1 Tables { TablePath: "/Root/ReorderOptionalKey" ReadRows: 20 ReadBytes: 160 AffectedPartitions: 2 } IngressRows: 20 ComputeCpuTimeUs: 88 BuildCpuTimeUs: 846 WaitOutputTimeUs: 1443 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489796195 CreateTimeMs: 1771489796191 UpdateTimeMs: 1771489796196 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:56.196938Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [16:7608490739930571768:2762] 2026-02-19T08:29:56.196970Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [16:7608490739930571772:2766], trace_id# 2026-02-19T08:29:56.196999Z node 16 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [16:7608490739930571772:2766], trace_id# 2026-02-19T08:29:56.197113Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490739930571772:2766] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 4022 DurationUs: 1000 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 257 FinishTimeMs: 1771489796196 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 147 BuildCpuTimeUs: 110 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489796195 CreateTimeMs: 1771489796192 UpdateTimeMs: 1771489796196 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:29:56.197136Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [16:7608490739930571772:2766] 2026-02-19T08:29:56.197257Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. 
Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:29:56.197291Z node 16 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:29:56.197321Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [16:7608490739930571761:2543] TxId: 281474976710725. Ctx: { TraceId: 01khtga77ea6wep5hbj375e5r7, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YTkzZmZlMzctYjk2NGNiZWYtNmNlZDI4ZC1mMWJmMmY0NA==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.007292s ReadRows: 25 ReadBytes: 200 RequestUnits# 25 ForceFlag# true trace_id# 2026-02-19T08:29:57.506464Z node 17 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7608490744444425103:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:57.506550Z node 17 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:57.522707Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:57.674816Z node 17 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:57.676298Z node 17 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [17:7608490744444425068:2081] 1771489797505455 != 1771489797505458 2026-02-19T08:29:57.692801Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:57.692887Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:57.718858Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:57.719345Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:57.767602Z node 17 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:57.767624Z node 17 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:57.767633Z node 17 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:57.767723Z node 17 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:58.158303Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:01.838143Z node 17 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006a2/r3tmp/spilling-tmp-runner/node_17_fd058263-fe9a7e17-969ed94d-4b648d61, actor: [17:7608490761624295102:2311] 2026-02-19T08:30:01.838361Z node 17 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006a2/r3tmp/spilling-tmp-runner >> KqpNotNullColumns::InsertNotNullPkPg-useSink [GOOD] >> KqpNotNullColumns::InsertNotNull >> KqpNotNullColumns::ReplaceNotNullPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:40.939863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.050974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.060739Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.061072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.061262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:41.420372Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:41.519531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:41.519644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:41.577261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:41.670382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:42.333432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:42.334022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:42.334067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:42.334105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:42.334725Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:42.400006Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:42.940342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:46.009449Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:46.015508Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:46.018630Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:46.042788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:46.042888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.092395Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:46.094103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.255040Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:46.255129Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:46.256359Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.256951Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.257463Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.258241Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.258493Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.258602Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.258788Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.258884Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.259099Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:46.274386Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:46.485072Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:46.539059Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:46.539167Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:46.580266Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:46.580495Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:46.580725Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:46.580794Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:46.580865Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:46.580939Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:46.581022Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:46.581080Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:46.581603Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:46.587403Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.587527Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:46.614402Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2629] 2026-02-19T08:25:46.615535Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2183:2629], schemeshard id = 72075186224037897 2026-02-19T08:25:46.687174Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2264:2670] 2026-02-19T08:25:46.689743Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:46.701477Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Describe result: PathErrorUnknown 2026-02-19T08:25:46.701559Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Creating table 2026-02-19T08:25:46.701655Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:46.708112Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:46.712408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:46.720228Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:46.720371Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:46.733544Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:46.931230Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:46.963465Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:47.284213Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:47.415057Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:47.415163Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2270:2674] Owner: [2:2269:2673]. Column diff is empty, finishing 2026-02-19T08:25:48.118044Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:28:10.892399Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4318:3851], ActorId: [2:4566:4031], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:28:10.892456Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4318:3851], ActorId: [2:4566:4031], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDhjMTEyNTYtNTBhNDM0ZTgtY2JkNjc5ZmEtNDAzMGE0YmE=, TxId: 2026-02-19T08:28:10.955499Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4700:4153]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:10.955772Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-02-19T08:28:10.955808Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 3, ReplyToActorId = [2:4700:4153], StatRequests.size() = 1 2026-02-19T08:28:11.104608Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4682:4138], ActorId: [2:4683:4139], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmY0OTZkYTctYTU2NzAzYzQtZjYxZGJiMGUtYTI2NDc0N2E=, TxId: 2026-02-19T08:28:11.104675Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4682:4138], ActorId: [2:4683:4139], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmY0OTZkYTctYTU2NzAzYzQtZjYxZGJiMGUtYTI2NDc0N2E=, TxId: 2026-02-19T08:28:11.104985Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4681:4137], ActorId: [2:4682:4138], Got response [2:4683:4139] SUCCESS 2026-02-19T08:28:11.105588Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:28:11.130014Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:28:11.130100Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:56: [72075186224037894] 
TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId "operationId" , ActorId=[1:3424:3512] 2026-02-19T08:28:11.199390Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-02-19T08:28:11.199489Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-02-19T08:28:11.279460Z node 2 :STATISTICS DEBUG: service_impl.cpp:1153: EvRequestTimeout, pipe client id = [2:4458:3932], schemeshard count = 1 2026-02-19T08:28:12.084473Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:28:12.084652Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:697: [72075186224037894] ScheduleNextAnalyze. operationId: "operationId", started analyzing table: [OwnerId: 72075186224037897, LocalPathId: 35], AnalyzeActorId: [2:4754:4186] 2026-02-19T08:28:12.089958Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4764:4190], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:28:12.093120Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4764:4190], RunStreamQuery with text: SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table2` 2026-02-19T08:28:12.093240Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4764:4190], Start read next stream part 2026-02-19T08:28:12.137554Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4781:4200]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:28:12.137846Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:28:12.137892Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 4, ReplyToActorId = [2:4781:4200], StatRequests.size() = 1 2026-02-19T08:29:15.412089Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4764:4190], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:29:15.412244Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4764:4190], Start read next stream part 2026-02-19T08:29:15.412387Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg72dxae74hsvaf7e9qzt7", SessionId: ydb://session/3?node_id=2&id=ZDA1YmMxNDQtZjNlNzM3ZTctNDJmZDE5ZGEtNThlNzkxNzQ=, Slow query, duration: 63.315739s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table2`", parameters: 0b 2026-02-19T08:29:15.413379Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4764:4190], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:29:15.413424Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4764:4190], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2U5Yjg4ZDMtZWE2YTgxZjgtMmE5OTAwNjktYTVmNGE0NTY=, TxId: 2026-02-19T08:29:15.413503Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-02-19T08:29:15.413576Z node 2 :STATISTICS DEBUG: 
query_actor.cpp:135: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4877:4287], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:29:15.417105Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4877:4287], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table2` 2026-02-19T08:29:15.417208Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4877:4287], Start read next stream part 2026-02-19T08:29:15.464042Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4895:4302]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:29:15.464293Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-02-19T08:29:15.464347Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 5, ReplyToActorId = [2:4895:4302], StatRequests.size() = 1 2026-02-19T08:30:02.640318Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4877:4287], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:30:02.640478Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4877:4287], Start read next stream part 2026-02-19T08:30:02.640680Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg908s1tj9y0y6w0vjdja8", SessionId: ydb://session/3?node_id=2&id=NzQ5YTM5MDAtZTUwNDc4N2QtZGNhMWIxMWYtOGQ3NjQxNTU=, Slow query, duration: 47.220769s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table2`", parameters: 0b 2026-02-19T08:30:02.643406Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4877:4287], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:30:02.643473Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4754:4186], ActorId: [2:4877:4287], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2JlNDU1NjgtMmE1YWI5M2YtM2Y1YzE1NWUtMmEwMGMzZTY=, TxId: 2026-02-19T08:30:02.643588Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2026-02-19T08:30:02.643854Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4984:4385], ActorId: [2:4986:4387], Starting query actor #1 [2:4987:4388] 2026-02-19T08:30:02.643911Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: 
[2:4986:4387], ActorId: [2:4987:4388], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:30:02.648681Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4986:4387], ActorId: [2:4987:4388], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDM3NTk2OGMtMWJmNWExMzMtNTVlYTE0OTktMTk1NmI3YmU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:30:02.700313Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4986:4387], ActorId: [2:4987:4388], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDM3NTk2OGMtMWJmNWExMzMtNTVlYTE0OTktMTk1NmI3YmU=, TxId: 2026-02-19T08:30:02.700400Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4986:4387], ActorId: [2:4987:4388], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDM3NTk2OGMtMWJmNWExMzMtNTVlYTE0OTktMTk1NmI3YmU=, TxId: 2026-02-19T08:30:02.700844Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4984:4385], ActorId: [2:4986:4387], Got response [2:4987:4388] SUCCESS 2026-02-19T08:30:02.701239Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:30:02.740543Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2026-02-19T08:30:02.740640Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3424:3512] >> KqpNewEngine::InShardsWrite >> KqpNewEngine::LocksMultiShard [GOOD] >> KqpNewEngine::LocksMultiShardOk >> KqpNewEngine::KeyColumnOrder >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix [GOOD] >> KqpSqlIn::ComplexKey >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query >> KqpHashCombineReplacement::DqHashCombineTest-UseDqHashCombine+UseDqHashAggregate [GOOD] >> KqpKv::BulkUpsert >> KqpNewEngine::Truncated >> KqpRanges::IsNullPartial [GOOD] >> KqpRanges::LiteralOr >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query [GOOD] >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 >> KqpNewEngine::PruneWritePartitions+UseSink [GOOD] >> KqpNewEngine::PruneWritePartitions-UseSink >> KqpSort::TopSortTableExprOffset [GOOD] >> KqpSort::TopSortResults >> KqpNewEngine::BrokenLocksAtROTx [GOOD] >> KqpNewEngine::BrokenLocksAtROTxSharded >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::Describe >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> KqpNewEngine::ReadAfterWrite [GOOD] >> KqpNewEngine::Replace >> KqpNamedExpressions::NamedExpressionRandomChanged2+UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomChanged2-UseSink >> KqpNewEngine::MultiStatement [GOOD] >> KqpNewEngine::MultiStatementMixPure >> KqpRanges::UpdateWhereInWithNull [GOOD] >> KqpRanges::ValidatePredicates >> KqpNewEngine::Nondeterministic [GOOD] >> KqpNewEngine::MultiUsagePrecompute >> KqpNotNullColumns::InsertNotNull [GOOD] >> KqpNotNullColumns::InsertNotNullPg+useSink >> KqpNewEngine::PkRangeSelect4 [GOOD] >> KqpNewEngine::PruneEffectPartitions+UseSink >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> KqpNotNullColumns::ReplaceNotNullPk [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2117] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:91:2057] recipient: [36:88:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! 
new actor is[36:90:2118] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! 
new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:105:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:52:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:52:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:85:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:88:2057] recipient: [45:87:2117] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:90:2057] recipient: [45:87:2117] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:89:2118] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:205:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:85:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:88:2057] recipient: [46:87:2117] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:90:2057] recipient: [46:87:2117] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:89:2118] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:205:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:86:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:89:2057] recipient: [47:88:2117] Leader for TabletID 72057594037927937 is [47:90:2118] sender: [47:91:2057] recipient: [47:88:2117] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! 
new actor is[47:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061]
>> KqpNamedExpressions::NamedExpressionChanged+UseSink [GOOD]
>> KqpNamedExpressions::NamedExpressionChanged-UseSink
>> KqpSqlIn::SecondaryIndex_SimpleKey [GOOD]
>> KqpSqlIn::SecondaryIndex_SimpleKey_In_And
>> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD]
>> TKeyValueTest::TestSetExecutorFastLogPolicy
>> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink [GOOD]
>> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink
>> KqpNewEngine::AsyncIndexUpdate [GOOD]
>> KqpNewEngine::AutoChooseIndex
>> KqpSort::TopSortExpr [GOOD]
>> KqpSort::TopParameter
>> KqpRanges::UpdateWhereInNoFullScan-UseSink [GOOD]
>> KqpRanges::UpdateWhereInBigLiteralList
>> TOlap::StoreStatsQuota [GOOD]
>> TOlapNaming::AlterColumnStoreFailed
>> TKeyValueTest::TestLargeWriteAndDelete [GOOD]
>> KqpSqlIn::TupleLiteral [GOOD]
>> KqpSqlIn::TupleSelect
>> KqpNewEngine::InShardsWrite [GOOD]
>> KqpNewEngine::IdxLookupExtractMembers
>> KqpKv::ReadRows_SpecificKey
>> KqpNewEngine::DeferredEffects [GOOD]
>> KqpNewEngine::Delete+UseSink
>> KqpNewEngine::KeyColumnOrder [GOOD]
>> KqpNewEngine::KeyColumnOrder2
>> KqpKv::BulkUpsert [GOOD]
>> KqpKv::ReadRows_NonExistentKeys
>> KqpNewEngine::LocksMultiShardOk [GOOD]
>> KqpNewEngine::LocksEffects
>> KqpReturning::ReturningWorksIndexedInsert-QueryService [GOOD]
>> KqpReturning::ReturningWorksIndexedReplace+QueryService
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed!
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for Tab ... t: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:84:2115] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:87:2057] recipient: [35:84:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:86:2116] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:202:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:83:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:85:2115] Leader for TabletID 72057594037927937 is [36:87:2116] sender: [36:88:2057] recipient: [36:85:2115] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! 
new actor is[36:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:54:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:52:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:52:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:54:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:54:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:203:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:52:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:52:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:86:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:89:2057] recipient: [45:88:2118] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:91:2057] recipient: [45:88:2118] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:90:2119] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:206:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:86:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:89:2057] recipient: [46:88:2118] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:91:2057] recipient: [46:88:2118] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:90:2119] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:206:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:87:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:90:2057] recipient: [47:89:2118] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:92:2057] recipient: [47:89:2118] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! 
new actor is[47:91:2119] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:207:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061]
>> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In [GOOD]
>> KqpSqlIn::SecondaryIndex_TupleParameter
>> TOlapNaming::AlterColumnStoreFailed [GOOD]
>> KqpNotNullColumns::Describe [GOOD]
>> KqpNotNullColumns::CreateTableWithNotNullColumns
>> KqpNotNullColumns::ReplaceNotNullPkPg [GOOD]
>> KqpNotNullColumns::SelectNotNullColumns
>> KqpNewEngine::Truncated [GOOD]
>> KqpNewEngine::Update+UseSink
>> KqpReturning::ReturningWorksIndexedDelete-QueryService [GOOD]
>> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService
>> KqpNotNullColumns::InsertNotNullPg+useSink [GOOD]
>> KqpNotNullColumns::InsertNotNullPg-useSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:27:05.105107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:27:05.105207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.105286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:27:05.105332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:27:05.105392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:27:05.105430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:27:05.105480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:27:05.105512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:27:05.105578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:27:05.105650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39:
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:27:05.106579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:27:05.106899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:27:05.179878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:27:05.179920Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:05.191446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:27:05.192153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:27:05.192364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:27:05.204773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:27:05.205661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:27:05.206388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:27:05.206722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:27:05.210107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.210261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:27:05.211267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:27:05.211318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:27:05.211397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:27:05.211432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:27:05.211479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:27:05.211637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:27:05.363591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:27:05.364505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.364635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.364711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.364853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.364926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.364989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.365105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.365183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.365255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.365333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.365412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.365504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:27:05.365570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
nStep5000039 State->FrontStep: 5000039 2026-02-19T08:30:13.145134Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:30:13.145189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:30:13.145428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:30:13.145583Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:30:13.145629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:30:13.145699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-02-19T08:30:13.145797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:30:13.145853Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:30:13.145915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-02-19T08:30:13.146895Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:30:13.147007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:30:13.147049Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:30:13.147092Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:30:13.147141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:30:13.147856Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:30:13.147929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:30:13.147970Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:30:13.148000Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:30:13.148032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:30:13.148105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:30:13.150049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-02-19T08:30:13.150132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2026-02-19T08:30:13.150223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2026-02-19T08:30:13.150491Z node 2 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2026-02-19T08:30:13.150769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6496: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2026-02-19T08:30:13.150873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2026-02-19T08:30:13.151604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:30:13.153325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:30:13.153768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:30:13.171019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6670: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2026-02-19T08:30:13.171106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:30:13.171243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:30:13.173905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 
72057594046678944 2026-02-19T08:30:13.174081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:30:13.174127Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:30:13.174256Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:30:13.174292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:30:13.174336Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:30:13.174370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:30:13.174406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:30:13.174476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:751:2727] message: TxId: 102 2026-02-19T08:30:13.174529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:30:13.174571Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:30:13.174605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:30:13.174746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:30:13.176692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:30:13.176753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:813:2781] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-02-19T08:30:13.179443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:30:13.179671Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:30:13.179939Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2026-02-19T08:30:13.182261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:30:13.182494Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:30:13.182822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:30:13.182870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:30:13.183271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:30:13.183366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:30:13.183407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:851:2819] TestWaitNotification: OK eventTxId 103 >> KqpRanges::LiteralOr [GOOD] >> KqpRanges::LiteralOrCompisite >> KqpNotNullColumns::UpsertNotNullPk >> KqpNewEngine::Replace [GOOD] >> KqpNewEngine::ReadRangeWithParams >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate >> KqpNewEngine::PruneWritePartitions-UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions-UseSink >> KqpExtractPredicateLookup::OverflowLookup [GOOD] >> KqpExtractPredicateLookup::SimpleRange |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardTxOrder::RandomPointsAndRanges [GOOD] |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpKv::ReadRows_SpecificKey [GOOD] >> KqpKv::ReadRows_UnknownTable >> KqpNewEngine::MultiStatementMixPure [GOOD] >> KqpNewEngine::MultiEffects >> KqpNamedExpressions::NamedExpressionRandomUpsertReturning-UseSink-UseDataQuery [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertReturning+UseSink-UseDataQuery >> KqpNamedExpressions::NamedExpressionRandomChanged2-UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomInsert+UseSink >> KqpSqlIn::ComplexKey [GOOD] >> KqpSqlIn::Dict >> KqpRanges::NullInKey >> KqpKv::ReadRows_NonExistentKeys [GOOD] >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge >> KqpNewEngine::MultiUsagePrecompute [GOOD] >> KqpNewEngine::OrderedScalarContext >> KqpNewEngine::PruneEffectPartitions+UseSink [GOOD] >> KqpNewEngine::PrecomputeKey >> KqpNotNullColumns::SelectNotNullColumns [GOOD] >> KqpNotNullColumns::UpdateNotNull >> KqpNewEngine::KeyColumnOrder2 [GOOD] >> KqpNewEngine::JoinWithParams >> KqpNewEngine::Delete+UseSink [GOOD] >> KqpNewEngine::Delete-UseSink >> KqpNamedExpressions::NamedExpressionChanged-UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomChanged+UseSink >> KqpNewEngine::IdxLookupExtractMembers [GOOD] >> KqpNewEngine::FlatmapLambdaMutiusedConnections >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] >> KqpSort::TopParameter [GOOD] >> KqpSort::TopParameterFilter >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2026-02-19T08:28:42.393327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:42.393377Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:42.394164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:42.405488Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:42.405836Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:42.406105Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:42.444296Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:42.451159Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:42.451326Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:42.452856Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:42.452923Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:42.452968Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:42.453407Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:42.453481Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 
2026-02-19T08:28:42.453558Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:42.519586Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:42.555022Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-02-19T08:28:42.555188Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:42.555264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:42.555313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:42.555345Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:42.555377Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.555534Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.555577Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.555833Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:42.555947Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:42.556046Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.556080Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:42.556126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:42.556163Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:42.556204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:42.556236Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:42.556274Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:42.556379Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.556423Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.556465Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:42.565671Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: 
"\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:42.565768Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:42.565853Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:42.566010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:42.566073Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:42.566144Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:42.566180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:42.566214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:42.566248Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:42.566278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.566535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:42.566565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:42.566604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:42.566633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.566683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:42.566716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:42.566767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:42.566798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.566822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:42.578434Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:42.578484Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.578529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.578558Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:42.578615Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:42.578978Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.579031Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.579073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:42.579188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:42.579223Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:42.579361Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.579398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:42.579431Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:42.579459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:42.582003Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:42.582047Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.582195Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.582222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.582254Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.582284Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:42.582312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:42.582339Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:42.582363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
atashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2026-02-19T08:30:13.614821Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:30:13.615067Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-02-19T08:30:13.615105Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:30:13.615129Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437184 on unit StoreAndSendOutRS 2026-02-19T08:30:13.615161Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:4026: Send RS 400 at 9437184 from 9437184 to 9437185 txId 403 2026-02-19T08:30:13.615212Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:30:13.615235Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2026-02-19T08:30:13.615272Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-02-19T08:30:13.615319Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2026-02-19T08:30:13.615348Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:30:13.616083Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [4:240:2231], Recipient [4:459:2400]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2026-02-19T08:30:13.616132Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:13.616166Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 402 2026-02-19T08:30:13.616362Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [4:240:2231], Recipient [4:459:2400]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2026-02-19T08:30:13.616396Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:13.616423Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 2026-02-19T08:30:13.616654Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287425, Sender [4:240:2231], Recipient [4:348:2314]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2026-02-19T08:30:13.616695Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3235: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-02-19T08:30:13.616729Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3395: Receive RS at 9437185 source 9437184 dest 9437185 producer 9437184 txId 403 2026-02-19T08:30:13.616795Z node 4 
:TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437185 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2026-02-19T08:30:13.616839Z node 4 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000004:403] from=9437184 to=9437185origin=9437184 2026-02-19T08:30:13.616906Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437185 2026-02-19T08:30:13.617333Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [4:348:2314], Recipient [4:348:2314]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:30:13.617376Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:30:13.617427Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-02-19T08:30:13.617460Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:30:13.617496Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000004:403] at 9437185 for LoadAndWaitInRS 2026-02-19T08:30:13.617524Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit LoadAndWaitInRS 2026-02-19T08:30:13.617558Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-02-19T08:30:13.617588Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit LoadAndWaitInRS 2026-02-19T08:30:13.617614Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit BlockFailPoint 2026-02-19T08:30:13.617642Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit BlockFailPoint 2026-02-19T08:30:13.617666Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-02-19T08:30:13.617688Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit BlockFailPoint 2026-02-19T08:30:13.617746Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit ExecuteDataTx 2026-02-19T08:30:13.617771Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit ExecuteDataTx 2026-02-19T08:30:13.619796Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000004:403] at tablet 9437185 with status COMPLETE 2026-02-19T08:30:13.619855Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000004:403] at 9437185: {NSelectRow: 1, NSelectRange: 4, NUpdateRow: 4, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 85, SelectRangeBytes: 680, UpdateRowBytes: 29, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-02-19T08:30:13.619916Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2026-02-19T08:30:13.619963Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit 
ExecuteDataTx 2026-02-19T08:30:13.619996Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2026-02-19T08:30:13.620025Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2026-02-19T08:30:13.620263Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is DelayComplete 2026-02-19T08:30:13.620295Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2026-02-19T08:30:13.620322Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2026-02-19T08:30:13.620350Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2026-02-19T08:30:13.620383Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2026-02-19T08:30:13.620405Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit CompletedOperations 2026-02-19T08:30:13.620430Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:403] at 9437185 has finished 2026-02-19T08:30:13.620458Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:30:13.620485Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-02-19T08:30:13.620512Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-02-19T08:30:13.620535Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-02-19T08:30:13.640356Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-02-19T08:30:13.640408Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437186 on unit CompleteOperation 2026-02-19T08:30:13.640458Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 403] from 9437186 at tablet 9437186 send result to client [4:104:2137], exec latency: 3 ms, propose latency: 5 ms 2026-02-19T08:30:13.640507Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2026-02-19T08:30:13.640535Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-02-19T08:30:13.641234Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [4:459:2400], Recipient [4:348:2314]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2026-02-19T08:30:13.641276Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:13.641312Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 403 2026-02-19T08:30:13.658824Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437185 2026-02-19T08:30:13.658889Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2026-02-19T08:30:13.658948Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:104:2137], exec latency: 3 ms, propose latency: 5 ms 2026-02-19T08:30:13.659010Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2026-02-19T08:30:13.659040Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-02-19T08:30:13.659628Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [4:348:2314], Recipient [4:240:2231]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2026-02-19T08:30:13.659679Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:13.659714Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 >> KqpRanges::UpdateWhereInBigLiteralList [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> KqpNotNullColumns::UpsertNotNullPk [GOOD] >> KqpNotNullColumns::UpsertNotNullPkPg >> KqpNotNullColumns::InsertNotNullPg-useSink [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup >> KqpNewEngine::LocksEffects [GOOD] >> KqpNewEngine::LocksNoMutations >> KqpKv::ReadRows_UnknownTable [GOOD] >> KqpKv::ReadRows_NotFullPK >> KqpNewEngine::Update+UseSink [GOOD] >> KqpNewEngine::Update-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] Test command err: 2026-02-19T08:29:41.456823Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490677558511590:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:41.456891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:41.626134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:41.632512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:41.632597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:41.696655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:41.723386Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:41.727430Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490677558511547:2081] 1771489781455065 != 1771489781455068 2026-02-19T08:29:41.794945Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:41.794966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:41.794973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:41.795064Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:41.899225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:42.174139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:42.462207Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:44.246367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490690443414324:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.246502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.246794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490690443414334:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.246832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.458696Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490690443414347:2502] txid# 281474976710658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2026-02-19T08:29:44.480164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490690443414357:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.480236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.480451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490690443414359:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.480491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.492600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.273358Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490694279695746:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:45.273823Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:45.287393Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:45.350770Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:45.352447Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490694279695706:2081] 1771489785272267 != 1771489785272270 2026-02-19T08:29:45.387259Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:45.387345Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:45.391492Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:45.425332Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:45.425355Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:45.425361Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:45.425437Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:45.524953Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:45.707110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:46.281214Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:48.006610Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490707164598479:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.006702Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.007091Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490707164598489:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.007147Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.023017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.067180Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490707164598581:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.067304Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.067973Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490707164598583:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.068061Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.086653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.126989Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPo ... n or outdated, will use file: (empty maybe) 2026-02-19T08:30:04.274605Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:04.274614Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:04.274706Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:04.764063Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:05.054205Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:07.920894Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608490789067121817:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:07.921029Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:07.921505Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608490789067121826:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:07.921562Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:07.955185Z node 6 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [6:7608490789067121841:2505] txid# 281474976710658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2026-02-19T08:30:07.974079Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608490789067121849:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:07.974172Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:07.974564Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608490789067121852:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:07.974620Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.000460Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.965577Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490792825476461:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:08.965680Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:08.986930Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:09.075155Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:09.080971Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490792825476422:2081] 1771489808964604 != 1771489808964607 2026-02-19T08:30:09.089693Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:09.089813Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:09.121068Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:09.178032Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:09.178056Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:09.178067Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:09.178153Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:09.274108Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:09.807273Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:09.819698Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:09.973434Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:12.917941Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [7:7608490810005346496:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.918057Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.918437Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490810005346506:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.918504Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.938145Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:13.908226Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490816152011266:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:13.908491Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:13.924227Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:14.015135Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:14.017267Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490816152011228:2081] 1771489813906993 != 1771489813906996 2026-02-19T08:30:14.034720Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:14.034842Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:14.094233Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:14.146346Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:14.146375Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:14.146385Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:14.146496Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:14.225631Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:14.765778Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:14.772243Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:14.915710Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:18.208736Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And [GOOD] >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In >> KqpNewEngine::AutoChooseIndex [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLimit >> KqpSort::TopSortResults [GOOD] >> KqpSort::UnionAllSortLimit >> KqpReturning::ReturningTwice >> KqpNewEngine::ReadRangeWithParams [GOOD] >> KqpNewEngine::ScalarFunctions >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table >> KqpMergeCn::TopSortBy_Int32_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Float_Limit4 >> KqpSqlIn::TupleSelect [GOOD] >> KqpSqlIn::TupleNotOnlyOfKeys >> KqpReturning::ReturningWorksIndexedUpsert+QueryService >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table [GOOD] >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> KqpNotNullColumns::UpdateNotNull [GOOD] >> KqpNotNullColumns::UpdateNotNullPg >> KqpReturning::ReturningWorksIndexedReplace+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace-QueryService >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] >> KqpNewEngine::ComplexLookupLimit >> KqpNotNullColumns::UpsertNotNullPkPg [GOOD] >> KqpNotNullColumns::UpsertNotNull >> KqpRanges::NullInKey [GOOD] >> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService [GOOD] >> KqpRanges::NullInKeySuffix >> KqpReturning::ReturningUpsertAsTableListWithNullable+QueryService >> KqpNewEngine::MultiEffects [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable >> KqpKv::ReadRows_NotFullPK [GOOD] >> KqpKv::ReadRows_SpecificReturnValue >> KqpSqlIn::SecondaryIndex_TupleParameter [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge >> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput >> KqpNewEngine::JoinWithParams [GOOD] >> KqpNewEngine::LeftSemiJoin >> KqpNewEngine::Delete-UseSink [GOOD] >> KqpNewEngine::DeleteOn+UseSink >> KqpNewEngine::OrderedScalarContext [GOOD] >> KqpNewEngine::PagingNoPredicateExtract >> KqpNewEngine::PrecomputeKey [GOOD] >> KqpNewEngine::PrimaryView >> KqpNewEngine::FlatmapLambdaMutiusedConnections [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute >> KqpSort::TopParameterFilter [GOOD] >> KqpNewEngine::Update-UseSink [GOOD] >> KqpNewEngine::UpdateFromParams >> KqpSqlIn::Dict [GOOD] >> KqpSqlIn::Delete >> KqpNamedExpressions::NamedExpressionRandomChanged+UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandom+UseSink >> KqpNewEngine::LocksNoMutations [GOOD] >> KqpNewEngine::LocksNoMutationsSharded >> KqpNotNullColumns::UpdateNotNullPg [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup >> KqpNotNullColumns::UpsertNotNull [GOOD] >> KqpNotNullColumns::UpsertNotNullPg >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink [GOOD] >> KqpNewEngine::DependentSelect >> KqpKv::ReadRows_SpecificReturnValue [GOOD] >> KqpKv::ReadRows_TimeoutCancelsReads >> KqpRanges::UpdateWhereInBigLiteralListPrefix [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate >> KqpReturning::ReturningTwice 
[GOOD] >> KqpReturning::ReturningSerial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameterFilter [GOOD] Test command err: 2026-02-19T08:29:38.808733Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490662892298378:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:38.808870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:39.010047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:39.027326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:39.027416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:39.053163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:39.101680Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490662892298337:2081] 1771489778807221 != 1771489778807224 2026-02-19T08:29:39.108728Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:39.152222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:39.152248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:39.152255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:39.152345Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:39.264455Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:39.501322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:39.550820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.671097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:29:39.787214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.820744Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:39.839664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.314755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490675777202102:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.314857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.315180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490675777202112:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.315247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.613760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.643783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.673040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.703416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.728890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.759897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.790844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.831014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.896607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490675777202983:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.896712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.896792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490675777202988:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.896969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490675777202990:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.897003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.900061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:41.908624Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490675777202991:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:42.009210Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490675777203043:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:44.312687Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490688757767041:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:44.312753Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:44.322046Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:44.402497Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:2 ... WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:30:19.597008Z node 7 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:30:19.621862Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:19.635020Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:19.635053Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:19.635071Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:19.635168Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:19.677815Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:20.161111Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:20.172602Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.250650Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.458899Z node 7 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:20.507168Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.594305Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.358780Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490857126704363:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.358879Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.359127Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490857126704372:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.359180Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.444761Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.487655Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.525816Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.560844Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.599046Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.671764Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.713108Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.769325Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.857521Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490857126705247:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.857622Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490857126705252:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.857643Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.857902Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490857126705254:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.857953Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:23.861980Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:23.873522Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490857126705255:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:23.938525Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490857126705307:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:24.446987Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490839946833351:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:24.447076Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (declare $limit (DataType 'Uint64)) (declare $value (DataType 'Int32)) (let $1 (KqpTable '"/Root/TwoShard" '"72057594046644480:38" '"" '1)) (let $2 '('"Key" '"Value1" '"Value2")) (let $3 (KqpRowsSourceSettings $1 $2 '('('"ForcePrimary")) (Void) '())) (let $4 (DataType 'Int32)) (let $5 (Min (Uint64 '"1001") $limit)) (let $6 (StructType '('"Key" (OptionalType (DataType 'Uint32))) '('"Value1" (OptionalType (DataType 'String))) '('"Value2" (OptionalType $4)))) (let $7 '('('"_logical_id" '494) '('"_id" '"36d85d50-174ef210-adeb779c-ccad9524") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (lambda '($16) (block '( (let $17 (Member $16 '"Value2")) (return (Member $16 '"Key") (Member $16 '"Value1") $17 (Coalesce (!= $17 $value) (Bool 'false))) )))) (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda '($18 $19 $20 $21) $21) $5)) (let $15 (lambda '($22 $23 $24 $25) $22 $23 $24)) (return (FromFlow (WideMap $14 $15))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($26) (FromFlow (NarrowMap (Take (ToFlow $26) $5) (lambda '($27 $28 $29) (AsStruct '('"Key" $27) '('"Value1" $28) '('"Value2" $29)))))) '('('"_logical_id" '507) '('"_id" '"38c6f18f-1d3e8914-2ad1496e-e30a3ab1")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '('('"$limit") '('"$value")) '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) >> KqpSort::UnionAllSortLimit [GOOD] >> KqpSqlIn::CantRewrite >> KqpMergeCn::TopSortBy_Float_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_String_Limit3 >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> KqpRanges::NullInKeySuffix [GOOD] >> KqpRanges::NullInPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 
72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099 ... 57] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:142:2057] recipient: [56:141:2158] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:144:2057] recipient: [56:141:2158] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! new actor is[56:143:2159] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:259:2057] recipient: [56:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:52:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:52:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] !Reboot 72057594037927937 (actor [57:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:140:2057] recipient: [57:39:2086] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:143:2057] recipient: [57:142:2158] Leader for TabletID 72057594037927937 is [57:144:2159] sender: [57:145:2057] recipient: [57:142:2158] !Reboot 72057594037927937 (actor [57:58:2099]) rebooted! !Reboot 72057594037927937 (actor [57:58:2099]) tablet resolver refreshed! 
new actor is[57:144:2159] Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:59:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:76:2057] recipient: [58:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:54:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:54:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:52:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:52:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:54:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:54:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:81:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:84:2057] recipient: [63:83:2114] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:86:2057] recipient: [63:83:2114] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:85:2115] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:201:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:52:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:52:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:81:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:84:2057] recipient: [64:83:2114] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:86:2057] recipient: [64:83:2114] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! !Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:85:2115] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:201:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:53:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:53:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:82:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:85:2057] recipient: [65:84:2114] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:87:2057] recipient: [65:84:2114] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! !Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! 
new actor is[65:86:2115] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:202:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:54:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:54:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:85:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:88:2057] recipient: [66:87:2117] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:90:2057] recipient: [66:87:2117] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:89:2118] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:205:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:205:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:54:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:54:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:86:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:89:2057] recipient: [68:88:2117] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:91:2057] recipient: [68:88:2117] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! 
new actor is[68:90:2118] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:206:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:52:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:52:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] >> KqpNewEngine::StreamLookupWithView >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLambda >> KqpRanges::LiteralOrCompisiteCollision [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate >> KqpReturning::ReturningWorksIndexedUpsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedUpsert-QueryService >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_Decimal >> KqpNewEngine::MultiEffectsOnSameTable [GOOD] >> KqpNewEngine::LookupColumns >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table [GOOD] >> KqpNewEngine::DeleteOn+UseSink [GOOD] >> KqpNewEngine::DecimalColumn >> KqpNotNullColumns::UpsertNotNullPg [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex >> KqpNewEngine::LeftSemiJoin [GOOD] >> KqpNewEngine::JoinPure >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In [GOOD] >> KqpSqlIn::PhasesCount >> KqpReturning::ReturningUpsertAsTableListWithNullable+QueryService [GOOD] >> KqpReturning::ReturningUpsertAsTableListWithNullable-QueryService >> KqpSort::ReverseFirstKeyOptimized >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn >> KqpNewEngine::ComplexLookupLimit [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertIndex-UseSink-UseDataQuery [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertIndex+UseSink-UseDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:54:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:54:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:52:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:54:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:52:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:52:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:54:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:82:2113] Leader for TabletID 72057594037927937 is [45:82:2113] sender: [45:198:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:78:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:81:2057] recipient: [46:80:2112] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:83:2057] recipient: [46:80:2112] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:82:2113] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:198:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:79:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:82:2057] recipient: [47:81:2112] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:84:2057] recipient: [47:81:2112] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:83:2113] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:199:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:82:2057] recipient: [48:39:2086] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:85:2057] recipient: [48:84:2115] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:87:2057] recipient: [48:84:2115] !Reboot 72057594037927937 (actor [48:58:2099]) rebooted! !Reboot 72057594037927937 (actor [48:58:2099]) tablet resolver refreshed! new actor is[48:86:2116] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:202:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:54:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:59:2057] recipient: [49:54:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:76:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:82:2057] recipient: [49:39:2086] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:85:2057] recipient: [49:84:2115] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:87:2057] recipient: [49:84:2115] !Reboot 72057594037927937 (actor [49:58:2099]) rebooted! !Reboot 72057594037927937 (actor [49:58:2099]) tablet resolver refreshed! new actor is[49:86:2116] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:202:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:52:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:59:2057] recipient: [50:52:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:76:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:83:2057] recipient: [50:39:2086] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:86:2057] recipient: [50:85:2115] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:88:2057] recipient: [50:85:2115] !Reboot 72057594037927937 (actor [50:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [50:58:2099]) tablet resolver refreshed! new actor is[50:87:2116] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:203:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:54:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:59:2057] recipient: [51:54:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:76:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:86:2057] recipient: [51:39:2086] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:89:2057] recipient: [51:88:2118] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:91:2057] recipient: [51:88:2118] !Reboot 72057594037927937 (actor [51:58:2099]) rebooted! !Reboot 72057594037927937 (actor [51:58:2099]) tablet resolver refreshed! new actor is[51:90:2119] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:206:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:52:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:59:2057] recipient: [52:52:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:76:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:86:2057] recipient: [52:39:2086] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:89:2057] recipient: [52:88:2118] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:91:2057] recipient: [52:88:2118] !Reboot 72057594037927937 (actor [52:58:2099]) rebooted! !Reboot 72057594037927937 (actor [52:58:2099]) tablet resolver refreshed! new actor is[52:90:2119] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:206:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:53:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:59:2057] recipient: [53:53:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:76:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:87:2057] recipient: [53:39:2086] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:90:2057] recipient: [53:89:2118] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:92:2057] recipient: [53:89:2118] !Reboot 72057594037927937 (actor [53:58:2099]) rebooted! !Reboot 72057594037927937 (actor [53:58:2099]) tablet resolver refreshed! 
new actor is[53:91:2119] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:207:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:54:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:59:2057] recipient: [54:54:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:76:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:90:2057] recipient: [54:39:2086] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:93:2057] recipient: [54:92:2121] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:95:2057] recipient: [54:92:2121] !Reboot 72057594037927937 (actor [54:58:2099]) rebooted! !Reboot 72057594037927937 (actor [54:58:2099]) tablet resolver refreshed! new actor is[54:94:2122] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:210:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:53:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:59:2057] recipient: [55:53:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:76:2057] recipient: [55:14:2061] !Reboot 72057594037927937 (actor [55:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:90:2057] recipient: [55:39:2086] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:93:2057] recipient: [55:92:2121] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:95:2057] recipient: [55:92:2121] !Reboot 72057594037927937 (actor [55:58:2099]) rebooted! !Reboot 72057594037927937 (actor [55:58:2099]) tablet resolver refreshed! new actor is[55:94:2122] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:210:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:54:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:59:2057] recipient: [56:54:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:76:2057] recipient: [56:14:2061] !Reboot 72057594037927937 (actor [56:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:91:2057] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:94:2057] recipient: [56:93:2121] Leader for TabletID 72057594037927937 is [56:95:2122] sender: [56:96:2057] recipient: [56:93:2121] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! 
new actor is[56:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:52:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:52:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] >> KqpNewEngine::PagingNoPredicateExtract [GOOD] >> KqpNewEngine::MultipleBroadcastJoin >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] >> KqpUnion::ParallelUnionAll >> KqpNewEngine::UpdateFromParams [GOOD] >> KqpNewEngine::UnionAllPure >> KqpNewEngine::FlatMapLambdaInnerPrecompute [GOOD] >> KqpNewEngine::IndexAutochooserTopSort >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD] >> KqpReturning::ReturningWorksIndexedReplace-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] Test command err: 2026-02-19T08:29:37.908932Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490661385807140:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:37.908990Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:38.109342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:38.130766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:38.130956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:38.178861Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:38.181575Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490661385807097:2081] 1771489777907242 != 1771489777907245 2026-02-19T08:29:38.191266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:38.249246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:38.249276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:38.249283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:38.249393Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:38.320377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:38.542082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:38.915790Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:40.369319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674270709877:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.369399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.369688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674270709887:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.369815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.615961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.691384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674270709979:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.691525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.691622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674270709984:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.691714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674270709986:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.691775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.695269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:40.703900Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490674270709988:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:29:40.831797Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490674270710039:2593] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:41.824695Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_scan_query.cpp:410: Client lost 2026-02-19T08:29:42.670848Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490680883407491:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:42.670910Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:42.683255Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:42.759649Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:42.761627Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490680883407460:2081] 1771489782669663 != 1771489782669666 2026-02-19T08:29:42.791556Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:42.791625Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:42.795131Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:42.838579Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:42.838602Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:42.838610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:42.838676Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:42.974568Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:43.225833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:43.244202Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:43.303341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:43.451730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:43.519142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:43.676910Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:45.309131Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490693768311217:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:45.309230Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Re ... 861189153117:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:24.376486Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:24.406127Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:24.502679Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:24.504520Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490861189153086:2081] 1771489824373020 != 1771489824373023 2026-02-19T08:30:24.522744Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:24.522846Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:24.554984Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:24.717822Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:24.738558Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:24.738586Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:24.738598Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:24.738689Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:25.383398Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:25.443125Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:25.450920Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:25.460535Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.536973Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.741460Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.834009Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.169796Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490882663991447:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.169891Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.170313Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490882663991456:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.170364Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.266774Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.318411Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.360959Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.377902Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490861189153117:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:29.377978Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:29.397430Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.436654Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.495374Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.540221Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.597568Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.689399Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490882663992333:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.689503Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.689624Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490882663992338:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.689738Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490882663992340:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.689827Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:29.693360Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:29.705201Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490882663992342:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:29.772272Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490882663992393:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:31.769064Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpNewEngine::LocksNoMutationsSharded [GOOD] >> KqpNewEngine::LocksInRoTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD] Test command err: 2026-02-19T08:29:37.840109Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490657969317248:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:37.840318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:38.069368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:38.069519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:38.087059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:38.106126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:38.114873Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:38.116062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490657969317214:2081] 1771489777838743 != 1771489777838746 2026-02-19T08:29:38.181781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:38.181800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:38.181805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:38.181935Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:38.352050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:38.497986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:38.537613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.648087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.766117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.826935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.918129Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:40.356742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490670854220974:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.356851Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.357152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490670854220984:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.357215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.660196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.686340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.712654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.740232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.764610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.793258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.821952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.874450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.935949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490670854221858:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.936033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.936241Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490670854221864:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.936296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490670854221863:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.936351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.939378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:40.948831Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490670854221867:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:41.005492Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490670854221918:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:42.963646Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490679577065542:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:42.964239Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:42.972598Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:43.056281Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:2 ... 30:25.593237Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490866903479409:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:25.593303Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:25.611701Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:25.729864Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:25.729964Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:25.736618Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490866903479380:2081] 1771489825591466 != 1771489825591469 2026-02-19T08:30:25.751154Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:25.767520Z node 8 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:30:25.767803Z node 8 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:30:25.818892Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:25.826330Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:25.826355Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:25.826365Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:25.826456Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:25.900960Z node 8 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:26.426015Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:26.431373Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:26.442376Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.514912Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.647014Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:26.698794Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.778872Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.127270Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490888378317733:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.127368Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.127810Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490888378317743:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.127873Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.223500Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.266742Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.303068Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.344483Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.383845Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.424030Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.467721Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.535279Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.594435Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490866903479409:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:30.594634Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:30.685241Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490888378318614:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.685356Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490888378318619:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.685373Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.685727Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490888378318621:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.685815Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:30.689446Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:30.700982Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490888378318622:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:30.788445Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490888378318675:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] >> KqpNewEngine::Join >> KqpReturning::ReturningSerial [GOOD] >> KqpReturning::ReturningTypes >> KqpNewEngine::PrimaryView [GOOD] >> KqpNewEngine::DependentSelect [GOOD] >> KqpKv::ReadRows_Decimal [GOOD] >> KqpMergeCn::TopSortByDesc_Double_Limit3 >> KqpMergeCn::TopSortBy_String_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Date_Limit4 >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query [GOOD] >> KqpBatchUpdate::SimplePartitions [GOOD] >> KqpSqlIn::Delete [GOOD] >> KqpSqlIn::InWithCast ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] Test command err: 2026-02-19T08:29:51.771241Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490721496547988:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:51.773407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:51.944541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:51.971495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:51.971642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:52.023749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:52.052327Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:52.053225Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490721496547950:2081] 1771489791768800 != 1771489791768803 2026-02-19T08:29:52.103599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:52.103614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:52.103626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:52.103711Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:52.206483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:52.445085Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:52.776349Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:54.583443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490734381450731:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.583552Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.583836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490734381450741:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.583900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.831327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.939768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490734381450835:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.939832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.939916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490734381450840:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.940090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490734381450842:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.940138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.943534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:54.955186Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490734381450843:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:29:55.082467Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490738676418191:2595] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:55.244668Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490738676418232:2355], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestInsertNotNullPk, code: 2029 2026-02-19T08:29:55.246048Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MzY2NmU0MWMtYTI1ZGIxMmMtNTM5NmMyYTQtM2JiNWEzZjc=, ActorId: [1:7608490734381450728:2326], ActorState: ExecuteState, LegacyTraceId: 01khtga74r4f7jyprrsm380bj7, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 13 } message: "At function: KiWriteTable!" end_position { row: 1 column: 13 } severity: 1 issues { position { row: 1 column: 13 } message: "Missing key column in input: Key for table: /Root/TestInsertNotNullPk" end_position { row: 1 column: 13 } issue_code: 2029 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:29:55.269968Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490738676418241:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:29:55.270362Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MzY2NmU0MWMtYTI1ZGIxMmMtNTM5NmMyYTQtM2JiNWEzZjc=, ActorId: [1:7608490734381450728:2326], ActorState: ExecuteState, LegacyTraceId: 01khtga75p5knsq91hrxct6sfg, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 13 } message: "At function: KiWriteTable!" end_position { row: 1 column: 13 } severity: 1 issues { position { row: 1 column: 47 } message: "Failed to convert type: Struct<\'Key\':Null,\'Value\':String> to Struct<\'Key\':Uint64,\'Value\':String?>" end_position { row: 1 column: 47 } severity: 1 issues { position { row: 1 column: 47 } message: "Failed to convert \'Key\': Null to Uint64" end_position { row: 1 column: 47 } severity: 1 } } issues { position { row: 1 column: 47 } message: "Failed to convert input columns types to scheme types" end_position { row: 1 column: 47 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:29:56.139520Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490741697213083:2211];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:56.146279Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:56.149876Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:56.219248Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:56.229997Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490741697212886:2081] 1771489796131619 != 1771489796131622 2026-02-19T08:29:56.251619Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:56.251694Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:56.256529Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:56.330207Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:56.330226Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:56.330233Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:56.330289Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:56.384843Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, sc ... 
pp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:28.517828Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:28.519606Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490877758035199:2081] 1771489828397431 != 1771489828397434 2026-02-19T08:30:28.534230Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:28.534335Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:28.563191Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:28.624505Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:28.624531Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:28.624541Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:28.624629Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:28.693065Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:29.234719Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:29.241041Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:29.251132Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.318983Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.430991Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:29.481481Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.558727Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.613785Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490894937906262:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:32.613900Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:32.614222Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490894937906272:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:32.614277Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:32.699935Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.742067Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.789664Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.839999Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.882739Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.932775Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.992857Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.063250Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.162420Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490899232874439:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.162543Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.162604Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490899232874444:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.162857Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490899232874446:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.162928Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.166675Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:33.178212Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490899232874447:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:33.264276Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490899232874499:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:33.399896Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490877758035258:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:33.399980Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:35.251726Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.443659Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpRanges::UpdateWhereInMultipleUpdate [GOOD] >> KqpRanges::UpdateWhereInFullScan+UseSink >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table >> KqpNewEngine::Select1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DependentSelect [GOOD] Test command err: 2026-02-19T08:29:37.705459Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490659486995972:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:37.706035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:37.896199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:37.920976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:37.921085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:37.959899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:37.998254Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490659486995926:2081] 1771489777703751 != 1771489777703754 2026-02-19T08:29:37.999170Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:38.048266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-02-19T08:29:38.048286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:38.048299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:38.048390Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:38.077315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:38.387452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:38.436385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.527146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.631450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.688931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.809828Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:40.412617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490672371899694:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.412767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.413088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490672371899704:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.413151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.628211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.651515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.674632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.697263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.721659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.770473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.793772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.825915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.886469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490672371900574:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.886530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.886600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490672371900579:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.886644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490672371900580:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.886677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.889746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:40.898508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490672371900583:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:40.961533Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490672371900634:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:43.360045Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490685726995117:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:43.360097Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:43.372191Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:43.438659Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:2 ... 643686Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490838138264439:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:24.643798Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:28.666363Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490879347364352:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:28.666436Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:28.685653Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:28.792021Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:28.797855Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490879347364317:2081] 1771489828665379 != 1771489828665382 2026-02-19T08:30:28.806648Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:28.806789Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:28.838839Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:28.929746Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:28.929773Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:28.929783Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-02-19T08:30:28.929878Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:28.953795Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:29.550401Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:29.572413Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.637839Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.757836Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:29.805789Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:29.866750Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.993841Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490896527235397:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:32.993953Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:32.994219Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490896527235406:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:32.994266Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.083680Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.136327Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.178558Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.216913Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.256109Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.301763Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.345793Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.412791Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.521841Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490900822203570:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.521933Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.521987Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490900822203575:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.522141Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490900822203577:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.522189Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:33.526213Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:33.542316Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490900822203579:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:33.611541Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490900822203630:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:33.666593Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490879347364352:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:33.666664Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrimaryView [GOOD] Test command err: 2026-02-19T08:29:39.049093Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490668361717806:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:39.049542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:39.237211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:39.260337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:39.260431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:39.298683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:39.299342Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:39.300428Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490668361717772:2081] 1771489779047777 != 1771489779047780 2026-02-19T08:29:39.393559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:39.393593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:39.393601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:39.393702Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:39.454466Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:39.753626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:39.806545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.949745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.055312Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:40.094379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.152322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.320362Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490676951654238:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.320464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.320802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490676951654248:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.320873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.561087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.590297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.619287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.651909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.681016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.715708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.748040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.788227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.886485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490676951655117:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.886561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.886809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490676951655122:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.886854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490676951655123:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.886862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.890441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:41.900458Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490676951655126:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:42.005220Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490676951655177:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:44.342219Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490689476990903:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:44.342325Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:44.353705Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:44.429100Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:2 ... ification cookie mismatch for subscription [8:7608490870162149838:2081] 1771489826903651 != 1771489826903654 2026-02-19T08:30:27.047918Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:27.048036Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:27.068253Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:27.125158Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:27.125182Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:27.125192Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:27.125280Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:27.126445Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:27.628010Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:27.634671Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:27.646904Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:27.707798Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:27.918168Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:27.974725Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:28.066196Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.100005Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490891636988202:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.100099Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.100340Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490891636988211:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.100381Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.185249Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.219834Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.251729Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.286638Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.321122Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.368341Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.409657Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.468353Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.557988Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490891636989082:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.558116Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.558529Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490891636989087:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.558601Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490891636989088:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.558710Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:31.562898Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:31.574584Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490891636989091:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:31.633328Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490891636989142:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:31.904716Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490870162149869:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:31.904799Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:33.625908Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.679383Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.732114Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpRanges::NullInPredicate [GOOD] >> KqpRanges::NullInPredicateRow >> KqpNewEngine::ScalarFunctions [GOOD] >> KqpNewEngine::ReadDifferentColumns >> KqpNewEngine::StreamLookupWithView [GOOD] >> KqpNewEngine::StaleRO+EnableFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Decimal [GOOD] Test command err: 2026-02-19T08:29:43.619193Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490685237748214:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:43.619254Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:43.806705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:43.820876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:43.820964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:43.888628Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:43.889046Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:43.889422Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490685237748173:2081] 1771489783617819 != 1771489783617822 2026-02-19T08:29:43.963873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:43.985522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:43.985540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:43.985545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:43.985596Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:44.318979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:44.625376Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:46.255529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490698122650949:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:46.255570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490698122650958:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:46.255670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:46.256059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490698122650964:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:46.256202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:46.259516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:46.269288Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490698122650963:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:29:46.387096Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490698122651016:2531] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:46.715812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:29:47.187262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:29:47.187581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:29:47.187848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:29:47.188020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:29:47.188102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:29:47.188173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:29:47.188200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:29:47.188283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:29:47.188354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:29:47.188462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:29:47.188475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:29:47.188594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:29:47.188603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:29:47.188721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:29:47.188748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:29:47.188835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:29:47.188841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:29:47.188934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:29:47.188970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:29:47.188999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:29:47.189073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7608490698122651612:2349];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:29:47.189083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037950;self_id=[1:7608490698122651610:2347];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:29:47.18 ... Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:30:21.661110Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:30:21.664010Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:305:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:30:21.664460Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:30:21.664536Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:30:21.963928Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:21.964053Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:21.991701Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:34:2081] 1771489817708043 != 1771489817708046 2026-02-19T08:30:21.995650Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:22.041880Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:22.143859Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:30:22.587148Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:22.587216Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:22.587260Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:22.587811Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:22.601300Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:23.117076Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:23.211255Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ... 
waiting for SysViewsRoster update finished 2026-02-19T08:30:28.633889Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:30:28.650195Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:30:28.654926Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:320:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:30:28.655504Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:30:28.655600Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:30:28.972676Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:28.972834Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:29.011952Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:34:2081] 1771489824887031 != 1771489824887035 2026-02-19T08:30:29.014550Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:29.064217Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:29.184813Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:30:29.593798Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:29.593876Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:29.593930Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:29.594471Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:29.606159Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:30.155058Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:30.259461Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.258324Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490895166675891:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:32.259121Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-02-19T08:30:32.276517Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:32.365136Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:32.365249Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:32.369183Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:32.373277Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490895166675826:2081] 1771489832252296 != 1771489832252299 2026-02-19T08:30:32.405326Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:32.450237Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:32.485774Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:32.485795Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:32.485805Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:32.485910Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:33.132123Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:33.267162Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:36.572741Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490912346545902:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.572872Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.573154Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490912346545911:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.573268Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.607904Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.728334Z node 8 :RPC_REQUEST ERROR: rpc_read_rows.cpp:813: TReadRowsRPC ReplyWithError: Type mismatch, got type Uint64 for column Key22, but expected Decimal(22,9) 2026-02-19T08:30:36.741984Z node 8 :RPC_REQUEST ERROR: rpc_read_rows.cpp:813: TReadRowsRPC ReplyWithError: Type mismatch, got type Decimal(35,10) for column Key22, but expected Decimal(22,9) >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] >> KqpRanges::MergeRanges >> KqpSort::ReverseFirstKeyOptimized [GOOD] >> KqpSort::ReverseMixedOrderNotOptimized >> KqpNewEngine::JoinPure [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys >> KqpNewEngine::DecimalColumn [GOOD] >> KqpNewEngine::DecimalColumn35 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimplePartitions [GOOD] Test command err: 2026-02-19T08:26:23.202003Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489826817320807:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:26:23.202959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:26:23.397549Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:26:23.415304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:26:23.415422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:26:23.457567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:26:23.459486Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:26:23.461028Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489826817320765:2081] 1771489583197548 != 1771489583197551 2026-02-19T08:26:23.544998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:26:23.545025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:26:23.545038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:26:23.545119Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:26:23.667362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:26:25.710185Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006a5/r3tmp/spilling-tmp-runner/node_1_140c8502-763ec982-b226c95c-da6c725f, actor: [1:7608489835407257197:2384] 2026-02-19T08:26:25.710354Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006a5/r3tmp/spilling-tmp-runner 2026-02-19T08:26:27.505859Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.505924Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.505963Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.505992Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.506020Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.506058Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.506105Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.506142Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.506177Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.506214Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.506275Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:26:27.506335Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtg3v4c67kb82k4zw6fafsq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YjYyYTJjMjAtYjNhMzJhZDItMTRmNzg4MDgtODNhZGFmNWU=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:26:27.506515Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489843997192863:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg3v4c67kb82k4zw6fafsq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YjYyYTJjMjAtYjNhMzJhZDItMTRmNzg4MDgtODNhZGFmNWU=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:26:27.506557Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489843997192863:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtg3v4c67kb82k4zw6fafsq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YjYyYTJjMjAtYjNhMzJhZDItMTRmNzg4MDgtODNhZGFmNWU=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:26:27.506811Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:26:27.506975Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507029Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507078Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507134Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507190Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507240Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507291Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507339Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507380Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:26:27.507439Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... _EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [16:7608490911162763185:2843], CA [16:7608490911162763182:2840], CA [16:7608490911162763183:2841], CA [16:7608490911162763181:2839], CA [16:7608490911162763184:2842], trace_id# 2026-02-19T08:30:36.629645Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490911162763183:2841] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 537 Tasks { TaskId: 3 CpuTimeUs: 130 FinishTimeMs: 1771489836628 OutputBytes: 1 Tables { TablePath: "/Root/ReorderOptionalKey" ReadRows: 1 ReadBytes: 12 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 62 BuildCpuTimeUs: 68 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489836628 CreateTimeMs: 1771489836627 UpdateTimeMs: 1771489836628 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:30:36.629721Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [16:7608490911162763183:2841] 2026-02-19T08:30:36.629764Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [16:7608490911162763185:2843], CA [16:7608490911162763182:2840], CA [16:7608490911162763181:2839], CA [16:7608490911162763184:2842], trace_id# 2026-02-19T08:30:36.629800Z node 16 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [16:7608490911162763185:2843], CA [16:7608490911162763182:2840], CA [16:7608490911162763181:2839], CA [16:7608490911162763184:2842], trace_id# 2026-02-19T08:30:36.629912Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490911162763184:2842] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 447 Tasks { TaskId: 4 CpuTimeUs: 92 FinishTimeMs: 1771489836629 OutputBytes: 1 Tables { TablePath: "/Root/ReorderOptionalKey" AffectedPartitions: 1 } ComputeCpuTimeUs: 27 BuildCpuTimeUs: 65 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489836629 CreateTimeMs: 1771489836627 UpdateTimeMs: 1771489836629 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:30:36.629943Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710742. 
Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [16:7608490911162763184:2842] 2026-02-19T08:30:36.629979Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [16:7608490911162763185:2843], CA [16:7608490911162763182:2840], CA [16:7608490911162763181:2839], trace_id# 2026-02-19T08:30:36.630012Z node 16 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [16:7608490911162763185:2843], CA [16:7608490911162763182:2840], CA [16:7608490911162763181:2839], trace_id# 2026-02-19T08:30:36.630418Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490911162763182:2840] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 877 DurationUs: 1000 Tasks { TaskId: 2 CpuTimeUs: 164 FinishTimeMs: 1771489836630 OutputBytes: 1 Tables { TablePath: "/Root/ReorderOptionalKey" ReadRows: 9 ReadBytes: 114 AffectedPartitions: 1 } IngressRows: 9 ComputeCpuTimeUs: 82 BuildCpuTimeUs: 82 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489836629 CreateTimeMs: 1771489836626 UpdateTimeMs: 1771489836630 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:30:36.630465Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [16:7608490911162763182:2840] 2026-02-19T08:30:36.630504Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [16:7608490911162763185:2843], CA [16:7608490911162763181:2839], trace_id# 2026-02-19T08:30:36.630541Z node 16 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [16:7608490911162763185:2843], CA [16:7608490911162763181:2839], trace_id# 2026-02-19T08:30:36.630763Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490911162763181:2839] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1513 Tasks { TaskId: 1 CpuTimeUs: 945 FinishTimeMs: 1771489836630 OutputBytes: 1 Tables { TablePath: "/Root/ReorderOptionalKey" ReadRows: 46 ReadBytes: 585 AffectedPartitions: 2 } IngressRows: 46 ComputeCpuTimeUs: 190 BuildCpuTimeUs: 755 WaitOutputTimeUs: 45 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489836630 CreateTimeMs: 1771489836625 UpdateTimeMs: 1771489836630 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:30:36.630799Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [16:7608490911162763181:2839] 2026-02-19T08:30:36.630828Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [16:7608490911162763185:2843], trace_id# 2026-02-19T08:30:36.630857Z node 16 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [16:7608490911162763185:2843], trace_id# 2026-02-19T08:30:36.630969Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [16:7608490911162763185:2843] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1340 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 238 FinishTimeMs: 1771489836630 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 138 BuildCpuTimeUs: 100 HostName: "ghrun-fkrokuvqke" NodeId: 16 StartTimeMs: 1771489836630 CreateTimeMs: 1771489836627 UpdateTimeMs: 1771489836630 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:30:36.631001Z node 16 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [16:7608490911162763185:2843] 2026-02-19T08:30:36.631112Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:30:36.631146Z node 16 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:30:36.631177Z node 16 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [16:7608490911162763174:2543] TxId: 281474976710742. Ctx: { TraceId: 01khtgbeqbchjkx5c1vmbhfsts, Database: /Root, SessionId: ydb://session/3?node_id=16&id=ZDkzMzE5YmEtMTFjODZmZmUtMjk1ZjQxOTUtOGRiYWUyMTc=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.004714s ReadRows: 56 ReadBytes: 711 RequestUnits# 56 ForceFlag# true trace_id# >> KqpNotNullColumns::ReplaceNotNull >> KqpNewEngine::LookupColumns [GOOD] >> KqpNewEngine::MultiUsageInnerConnection >> KqpRanges::IsNull >> KqpNotNullColumns::UpdateTable_UniqIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg >> KqpRanges::DateKeyPredicate >> KqpReturning::ReturningWorksIndexedUpsert-QueryService [GOOD] >> KqpSort::ImaginarySortReverse+InvertPkOverSort >> KqpAgg::AggWithLookup >> KqpSqlIn::CantRewrite [GOOD] >> KqpNamedExpressions::NamedExpressionRandomInsert+UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomInsert-UseSink >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] >> KqpNewEngine::UnionAllPure [GOOD] >> KqpNewEngine::UpsertEmptyInput >> KqpSqlIn::PhasesCount [GOOD] >> KqpReturning::ReturningUpsertAsTableListWithNullable-QueryService [GOOD] >> KqpNewEngine::DuplicatedResults >> KqpNewEngine::Select1 [GOOD] >> KqpNewEngine::SimpleUpsertSelect >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query [GOOD] >> KqpNewEngine::Join [GOOD] >> KqpNewEngine::JoinIdxLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::CantRewrite [GOOD] Test command err: 2026-02-19T08:29:42.195121Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490681143876339:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:42.195192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:42.377652Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:42.403192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:42.403304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:42.447030Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:42.448192Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:42.450664Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490681143876298:2081] 1771489782193393 != 1771489782193396 2026-02-19T08:29:42.527431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:42.527450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:42.527476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:42.527542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:42.570858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:42.864840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:42.922872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:43.043920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:43.176950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:43.216719Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:43.237215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.897570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490689733812764:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.897674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.898178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490689733812774:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.898235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:45.226531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.251314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.275953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.300500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.326421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.355952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.396301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.442333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.507090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490694028780943:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:45.507157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:45.507208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490694028780948:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:45.507501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490694028780951:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:45.507545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:45.509984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:45.518280Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490694028780950:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:45.597177Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490694028781003:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:47.197807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490681143876339:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:47.197878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:29:47.943397Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490702185138673:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:47.943456Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:30.204745Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:30.298842Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:30.816074Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:30.830733Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:30.893580Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.035526Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:31.085502Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:31.163262Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.613813Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490906261973733:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:34.613906Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:34.614205Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490906261973742:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:34.614256Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:34.700142Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.742418Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.783495Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.824036Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.862729Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.906932Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.949035Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.992203Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490884787135417:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:34.992765Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:35.013787Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.123334Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490910556941923:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.123480Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.123519Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490910556941928:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.123839Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490910556941930:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.123900Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.128604Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:35.145648Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490910556941931:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:35.226952Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490910556941984:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:37.528606Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.602754Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.650516Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:40: Warning: At function: Filter, At lambda, At function: Coalesce
:5:48: Warning: At function: And
:5:41: Warning: At function: SqlIn
:5:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
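The warning above (code 1108) recommends enabling ANSI IN semantics for empty or nullable item collections. A minimal YQL sketch of how the suggested pragma could be applied to such a query; only the pragma itself comes from the warning text, while the table path, column names, and values are illustrative assumptions, not taken from this test:

    -- Hypothetical query; the pragma below is the one suggested by warning code 1108.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;
    SELECT Key, Value
    FROM `/Root/KeyValue`            -- assumed table, not from this test run
    WHERE Value IN ("One", "Two");   -- Value is assumed nullable; ANSI semantics govern how NULLs in the comparison are treated

Presumably, with the pragma set, the SqlIn warning would no longer be emitted for this pattern.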
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:4:40: Warning: At function: Filter, At lambda, At function: Coalesce
:5:48: Warning: At function: And
:5:41: Warning: At function: SqlIn
:5:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] Test command err: 2026-02-19T08:29:36.880712Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490656916871391:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:36.881394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:37.108415Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:37.137139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:37.137303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:37.178298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:37.217744Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490656916871359:2081] 1771489776879107 != 1771489776879110 2026-02-19T08:29:37.228534Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:37.277159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:37.277182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:37.277189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:37.277286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:37.301541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:37.603983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:37.655927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.784158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.888236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:37.913102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.969900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.410994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490669801775121:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.411111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.411379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490669801775131:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.411426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.841383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.868928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.896403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.924375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.952414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.004341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.033949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.073247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.135677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674096743296:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.135750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.135792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674096743301:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.135919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674096743303:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.135958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.138864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:40.147962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490674096743305:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:29:40.251001Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490674096743356:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:41.880902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490656916871391:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:41.880998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [[[1u];["One"]];[[2u];["Two"]]] [[[1u];["One"]];[[2u];["Two"]]] 2026-02-19T08:29:42.991328Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490680698946781:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:42.991382Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=canno ... 893219435398:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:31.770177Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:31.790433Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:31.974716Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:31.978896Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:31.979021Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:31.979111Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490893219435367:2081] 1771489831768922 != 1771489831768925 2026-02-19T08:30:32.001804Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:32.062055Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:32.074428Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:32.074456Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:32.074468Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:32.074571Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:32.750460Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:32.762416Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:32.772439Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:32.777864Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:32.855499Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.063654Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.160263Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.488544Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490914694273730:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.488688Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.489149Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490914694273740:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.489239Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.593625Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.632613Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.680358Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.723390Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.762854Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.770378Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490893219435398:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:36.770465Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:36.804351Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.847050Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.907188Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.993534Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490914694274611:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.993646Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.994004Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490914694274617:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.994050Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490914694274616:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.994071Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.998509Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:37.015467Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490914694274620:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:37.078891Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490918989241968:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:39.301180Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningUpsertAsTableListWithNullable-QueryService [GOOD] Test command err: 2026-02-19T08:29:38.484829Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490663207716060:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:38.484883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:38.678570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:38.704285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:38.704391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:38.740651Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:38.743808Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490663207716029:2081] 1771489778483688 != 1771489778483691 2026-02-19T08:29:38.754175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:38.823634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:38.823655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:38.823665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:38.823732Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:38.885374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:39.167615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:39.231514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.345077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.478488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.520308Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:39.550848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.859987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490671797652500:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.860114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.860370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490671797652509:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.860438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.162475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.186687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.210286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.235342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.259861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.290094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.320849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.359603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.419961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490676092620673:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.420026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.420096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490676092620678:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.420169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490676092620680:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.420211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:41.423114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:41.432479Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490676092620682:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:41.511063Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490676092620735:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:42.854096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.882043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.909580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__opera ... eration_create_table.cpp:689) 2026-02-19T08:30:33.864701Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490899620937502:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:33.865298Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:33.897801Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:33.993746Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:33.993844Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:33.995544Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:34.006357Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490899620937442:2081] 1771489833857711 != 1771489833857714 2026-02-19T08:30:34.037909Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:34.105774Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:34.105802Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:34.105816Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:34.105924Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:34.151966Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:30:34.806437Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:34.818452Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.916235Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:34.942431Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.138130Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.226248Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.328512Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490921095775800:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.328624Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.329095Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490921095775809:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.329155Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.428341Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.483789Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.520891Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.561030Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.599334Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.641206Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.682930Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.749611Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.851112Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490921095776680:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.851221Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.851237Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490921095776685:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.851841Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490921095776687:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.851935Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.856199Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:38.862161Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490899620937502:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:38.862393Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:38.918982Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490921095776688:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:38.974087Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490921095776743:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:41.178650Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::PhasesCount [GOOD] Test command err: 2026-02-19T08:29:36.980789Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490653168517583:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:36.982496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:37.167952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:37.185879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:37.186025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:37.238909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:37.240920Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:37.243415Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490653168517551:2081] 1771489776979202 != 1771489776979205 2026-02-19T08:29:37.326165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:37.326181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:37.326188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:37.326258Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:37.421072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:37.607036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:37.662800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.766745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.883912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.938962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.060381Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:39.641963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490666053421313:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.642056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.642240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490666053421322:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.642266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.025016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.053967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.081501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.110140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.137334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.169459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.218877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.254170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.312801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490670348389492:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.312898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.313068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490670348389498:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.313082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490670348389497:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.313117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.316303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:40.326308Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490670348389501:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:40.407617Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490670348389552:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:41.591209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.630302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.669331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__opera ... s undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:28.253020Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:33.441604Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490899203867166:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:33.441666Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:33.458040Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:33.556745Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:33.556859Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:33.558502Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:33.654460Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:33.722387Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:33.722410Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:33.722421Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-02-19T08:30:33.722528Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:33.746047Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:34.363030Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:34.376148Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.446875Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:34.455425Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.647044Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.727975Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.095981Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490920678705465:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.096083Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.096371Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490920678705474:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.096431Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.188355Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.232120Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.286427Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.328639Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.368564Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.410757Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.444532Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490899203867166:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:38.448658Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:38.457460Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.516113Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.663826Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490920678706349:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.663935Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.664039Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490920678706354:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.664237Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490920678706356:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.664287Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:38.668544Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:38.681584Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490920678706357:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:38.749832Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490920678706409:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpReturning::ReturningTypes [GOOD] >> KqpReturning::ReturningUpsertAsTableListNotNullOnly+QueryService >> KqpKv::ReadRows_TimeoutCancelsReads [GOOD] >> KqpKv::ReadRows_PgValue >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService >> KqpNewEngine::LocksInRoTx [GOOD] >> KqpNewEngine::LiteralKeys >> KqpMergeCn::TopSortByDesc_Double_Limit3 [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 >> KqpNotNullColumns::ReplaceNotNull [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg >> KqpNewEngine::MultipleBroadcastJoin [GOOD] >> GroupWriteTest::WithRead >> KqpSplit::BorderKeys+Descending >> KqpSort::ReverseMixedOrderNotOptimized [GOOD] >> KqpSort::ReverseLimitOptimized >> KqpMergeCn::TopSortBy_Date_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 >> KqpSplit::StreamLookupSplitAfterFirstResult >> KqpScan::SecondaryIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultipleBroadcastJoin [GOOD] Test command err: 2026-02-19T08:29:44.603214Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490691345509044:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:44.603777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:44.770268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:44.794066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:44.794193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:44.835253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:44.840362Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:44.862876Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490691345508985:2081] 1771489784598660 != 1771489784598663 2026-02-19T08:29:44.932456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:44.932489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:44.932499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:44.932594Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:44.948019Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:45.275927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:45.330043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.455860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.574300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.615898Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:45.635625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:47.686709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490704230412753:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:47.686874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:47.687222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490704230412763:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:47.687279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:47.962337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:47.990256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.021096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.046648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.074375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.106188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.162039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.203774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.277967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490708525380932:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.278060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.278179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490708525380937:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.278635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490708525380939:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.278718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:48.281506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:48.291035Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490708525380940:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:29:48.368778Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490708525380992:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:49.601107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490691345509044:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:49.601174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:29:50.554819Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490716596470833:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:50.554986Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... :35.107955Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:35.108065Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:35.112981Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:35.115295Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490902462633727:2081] 1771489834952491 != 1771489834952494 2026-02-19T08:30:35.150530Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:35.218999Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:35.219031Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:35.219042Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:35.219148Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:35.251090Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:35.930293Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:35.961232Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.011693Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:36.046404Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.232660Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.311275Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.589087Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490923937472090:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:39.589187Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:39.589642Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490923937472099:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:39.589692Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:39.681414Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.724363Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.761817Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.795715Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.828299Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.868352Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.909844Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.957272Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490902462633773:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:39.957351Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:39.974779Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.084313Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490928232440269:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.084441Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.084829Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490928232440274:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.084878Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490928232440275:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.085096Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.089688Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:40.103343Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490928232440278:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:40.201170Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490928232440330:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:42.364126Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.410044Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.456815Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) [] >> KqpNewEngine::ReadDifferentColumns [GOOD] >> KqpNewEngine::ReadDifferentColumnsPk >> KqpNewEngine::IndexAutochooserTopSort [GOOD] >> KqpNewEngine::IndexAutochooserAndLimitPushdown+AutoSelectIndex >> DataShardTxOrder::RandomPoints_DelayData [GOOD] >> KqpRanges::NullInPredicateRow [GOOD] >> KqpRanges::UpdateMulti >> KqpSqlIn::InWithCast [GOOD] >> KqpNewEngine::SimpleUpsertSelect [GOOD] >> KqpNewEngine::ShuffleWrite >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] >> KqpRanges::UpdateWhereInFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInFullScan-UseSink >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys [GOOD] >> KqpNewEngine::JoinWithPrecompute >> KqpRanges::DateKeyPredicate [GOOD] >> KqpRanges::DuplicateKeyPredicateLiteral >> KqpNewEngine::DecimalColumn35 [GOOD] >> KqpNewEngine::DeleteON >> KqpSort::ImaginarySortReverse+InvertPkOverSort [GOOD] >> KqpSort::ImaginarySortReverse-InvertPkOverSort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::InWithCast [GOOD] Test command err: 2026-02-19T08:29:41.513051Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490674833797232:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:41.514650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:41.699946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:41.714679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:41.714807Z node 
1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:41.762290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:41.805999Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:41.808552Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490674833797182:2081] 1771489781511313 != 1771489781511316 2026-02-19T08:29:41.862387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:41.862433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:41.862446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:41.862508Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:41.863569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:42.200481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:42.255637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.357724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.489729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.528277Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:42.547340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.379098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7608490687718700945:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.379230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.379583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490687718700955:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.379636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.691322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.716372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.743794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.770187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.797028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.825173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.882483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.927277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.993688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490687718701828:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.993775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.993814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490687718701833:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.993953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490687718701835:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.994014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.997833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:45.008591Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490687718701837:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:45.075813Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490692013669184:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:46.265984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:46.296871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:46.325322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__opera ... TADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490922663069752:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:38.738302Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:38.757661Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:38.853260Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:38.857905Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490922663069709:2081] 1771489838731885 != 1771489838731888 2026-02-19T08:30:38.870927Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:38.871029Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:38.936123Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:38.943617Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:39.010418Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:39.010445Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:39.010454Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:39.010547Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:39.259698Z node 7 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:39.611432Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:39.616417Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:39.628192Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.697477Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.827694Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:39.893595Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.979927Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.067220Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490944137908070:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.067339Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.067661Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490944137908080:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.067716Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.161576Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.208894Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.246305Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.284492Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.325685Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.377491Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.426151Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.487705Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.594730Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490944137908947:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.594845Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.595698Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490944137908952:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.595831Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490944137908953:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.596003Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.601530Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:43.615527Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490944137908956:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:43.678377Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490944137909007:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:43.734599Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490922663069752:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:43.734678Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpRanges::IsNull [GOOD] >> KqpRanges::IsNotNullSecondComponent >> KqpNewEngine::DuplicatedResults [GOOD] >> KqpNewEngine::EmptyMapWithBroadcast >> KqpScan::RightJoinSimple >> KqpRanges::MergeRanges [GOOD] >> KqpRanges::Like ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2026-02-19T08:28:42.529435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:28:42.529483Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:42.530135Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:28:42.541624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:28:42.541939Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:135:2156] 2026-02-19T08:28:42.542166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:28:42.586611Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:113:2143], Recipient [1:135:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:28:42.593083Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:28:42.593258Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:28:42.594867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-02-19T08:28:42.594938Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 9437184 2026-02-19T08:28:42.594990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 9437184 2026-02-19T08:28:42.595361Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:28:42.595445Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:28:42.595526Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 9437184 persisting started state actor id [1:204:2156] in generation 2 2026-02-19T08:28:42.664486Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:28:42.690532Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 
2026-02-19T08:28:42.690711Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:28:42.690805Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 9437184, actorId: [1:220:2215] 2026-02-19T08:28:42.690859Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 9437184 2026-02-19T08:28:42.690898Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-02-19T08:28:42.690929Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.691081Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.691157Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.691431Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-02-19T08:28:42.691527Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-02-19T08:28:42.691640Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.691675Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:28:42.691722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-02-19T08:28:42.691756Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-02-19T08:28:42.691806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-02-19T08:28:42.691838Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-02-19T08:28:42.691876Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:28:42.691974Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:215:2212], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.692021Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.692070Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:213:2211], serverId# [1:215:2212], sessionId# [0:0:0] 2026-02-19T08:28:42.698102Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:104:2137], Recipient [1:135:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 104 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-02-19T08:28:42.698183Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:28:42.698263Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-02-19T08:28:42.698402Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-02-19T08:28:42.698455Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-02-19T08:28:42.698517Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 1 at tablet 9437184 2026-02-19T08:28:42.698553Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-02-19T08:28:42.698587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-02-19T08:28:42.698635Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-02-19T08:28:42.698665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.698905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-02-19T08:28:42.698947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-02-19T08:28:42.698981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-02-19T08:28:42.699010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.699058Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-02-19T08:28:42.699088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-02-19T08:28:42.699119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-02-19T08:28:42.699162Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.699185Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-02-19T08:28:42.710703Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-02-19T08:28:42.710747Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-02-19T08:28:42.710784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-02-19T08:28:42.710812Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-02-19T08:28:42.710864Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-02-19T08:28:42.711178Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:226:2221], Recipient [1:135:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.711231Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, 
processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:28:42.711262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 9437184, clientId# [1:225:2220], serverId# [1:226:2221], sessionId# [0:0:0] 2026-02-19T08:28:42.711369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287424, Sender [1:104:2137], Recipient [1:135:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-02-19T08:28:42.711399Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3234: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-02-19T08:28:42.711536Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-02-19T08:28:42.711576Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-02-19T08:28:42.711607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-02-19T08:28:42.711638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-02-19T08:28:42.714188Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 104 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-02-19T08:28:42.714231Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:28:42.714431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:135:2156], Recipient [1:135:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.714509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:28:42.714547Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-02-19T08:28:42.714572Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:28:42.714594Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-02-19T08:28:42.714628Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-02-19T08:28:42.714650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
ource 9437185 dest 9437184 consumer 9437184 txId 521 2026-02-19T08:30:46.957388Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-02-19T08:30:46.957422Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.957452Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2026-02-19T08:30:46.957519Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-02-19T08:30:46.957547Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.957579Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2026-02-19T08:30:46.957774Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-02-19T08:30:46.957814Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.957844Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2026-02-19T08:30:46.958010Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-02-19T08:30:46.958051Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.958080Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2026-02-19T08:30:46.958222Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-02-19T08:30:46.958247Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.958269Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2026-02-19T08:30:46.958372Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-02-19T08:30:46.958397Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.958419Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: 
Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2026-02-19T08:30:46.958468Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-02-19T08:30:46.958489Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.958510Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2026-02-19T08:30:46.958683Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-02-19T08:30:46.958709Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.958733Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2026-02-19T08:30:46.958804Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-02-19T08:30:46.958827Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.958847Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2026-02-19T08:30:46.958979Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-02-19T08:30:46.959005Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.959025Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2026-02-19T08:30:46.959128Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-02-19T08:30:46.959151Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.959168Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2026-02-19T08:30:46.959228Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-02-19T08:30:46.959260Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.959288Z node 10 :TX_DATASHARD DEBUG: 
datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2026-02-19T08:30:46.959460Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-02-19T08:30:46.959494Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.959516Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2026-02-19T08:30:46.959655Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-02-19T08:30:46.959691Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.959724Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2026-02-19T08:30:46.959885Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-02-19T08:30:46.959921Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.959952Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2026-02-19T08:30:46.960129Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-02-19T08:30:46.960166Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.960196Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2026-02-19T08:30:46.980413Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:30:46.980501Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2026-02-19T08:30:46.980597Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [10:104:2137], exec latency: 1 ms, propose latency: 2 ms 2026-02-19T08:30:46.980684Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-02-19T08:30:46.980735Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:30:46.980989Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-02-19T08:30:46.981027Z node 10 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2026-02-19T08:30:46.981080Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [10:104:2137], exec latency: 0 ms, propose latency: 1 ms 2026-02-19T08:30:46.981131Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-02-19T08:30:46.981365Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269287938, Sender [10:241:2232], Recipient [10:348:2315]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-02-19T08:30:46.981421Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3236: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-02-19T08:30:46.981485Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 31 30 30 31 31 25 25 24 31 28 30 29 28 26 30 10 21 10 30 20 9 10 26 4 4 8 7 8 - 28 - - actual 31 30 30 31 31 25 25 24 31 28 30 29 28 26 30 10 21 10 30 20 9 10 26 4 4 8 7 8 - 28 - - interm 29 30 30 28 21 25 25 24 30 28 30 29 28 26 30 10 21 10 30 20 9 10 26 4 4 8 7 8 - 28 - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] Test command err: 2026-02-19T08:30:05.659534Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490780868944051:2234];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:05.659647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:05.811780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:05.821397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:05.821487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:05.897829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:05.921153Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:05.922317Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490780868943855:2081] 1771489805650051 != 1771489805650054 2026-02-19T08:30:05.983236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:05.983259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:05.983265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:05.983336Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:05.996476Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:06.395640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:06.402483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:06.662414Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:08.590504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490793753846639:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.590590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.590868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490793753846649:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.590905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.870209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.973941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490793753846745:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.974047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.974644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490793753846750:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.974665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490793753846751:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.974714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.978526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:08.989194Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490793753846754:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:30:09.080346Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490798048814101:2597] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:09.255496Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490798048814142:2355], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing key column in input: Key for table: /Root/TestReplaceNotNullPk, code: 2029 2026-02-19T08:30:09.255882Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YWJiZTZmM2UtNzJmZjk0YTItMTQzMDUwYjYtNGIwYmNlMQ==, ActorId: [1:7608490793753846636:2326], ActorState: ExecuteState, LegacyTraceId: 01khtgamtj5tja3hnc4mkqe9hj, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 14 } message: "At function: KiWriteTable!" end_position { row: 1 column: 14 } severity: 1 issues { position { row: 1 column: 14 } message: "Missing key column in input: Key for table: /Root/TestReplaceNotNullPk" end_position { row: 1 column: 14 } issue_code: 2029 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:09.278611Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490798048814151:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:49: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:49: Error: Failed to convert 'Key': Null to Uint64
:1:49: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:30:09.279781Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YWJiZTZmM2UtNzJmZjk0YTItMTQzMDUwYjYtNGIwYmNlMQ==, ActorId: [1:7608490793753846636:2326], ActorState: ExecuteState, LegacyTraceId: 01khtgamvf1ybv65w61pfs4k98, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 14 } message: "At function: KiWriteTable!" end_position { row: 1 column: 14 } severity: 1 issues { position { row: 1 column: 49 } message: "Failed to convert type: Struct<\'Key\':Null,\'Value\':String> to Struct<\'Key\':Uint64,\'Value\':String?>" end_position { row: 1 column: 49 } severity: 1 issues { position { row: 1 column: 49 } message: "Failed to convert \'Key\': Null to Uint64" end_position { row: 1 column: 49 } severity: 1 } } issues { position { row: 1 column: 49 } message: "Failed to convert input columns types to scheme types" end_position { row: 1 column: 49 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:10.006712Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490802616777040:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:10.006798Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:10.030402Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:10.134504Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:10.144836Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:10.144909Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:10.171303Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:10.239825Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:10.239846Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:10.239853Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:10.239931Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:10.257273Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: ... :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490940989752411:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.939049Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490940989752412:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.939128Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:43.943716Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:43.957726Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490940989752415:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:30:44.043983Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490945284719764:2627] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:44.928705Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490923809882221:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:44.928788Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:45.197250Z node 8 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [8:7608490949579687200:2382], TxId: 281474976715663, task: 1. Ctx: { TraceId : 01khtgbqhq6kpdpjdgfn1b3wwk. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 }. 2026-02-19T08:30:45.197851Z node 8 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [8:7608490949579687201:2383], TxId: 281474976715663, task: 2. Ctx: { TraceId : 01khtgbqhq6kpdpjdgfn1b3wwk. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [8:7608490949579687196:2327], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2026-02-19T08:30:45.198116Z node 8 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [8:7608490949579687202:2384], TxId: 281474976715663, task: 3. Ctx: { TraceId : 01khtgbqhq6kpdpjdgfn1b3wwk. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [8:7608490949579687196:2327], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2026-02-19T08:30:45.198434Z node 8 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [8:7608490949579687203:2385], TxId: 281474976715663, task: 4. Ctx: { CheckpointId : . TraceId : 01khtgbqhq6kpdpjdgfn1b3wwk. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [8:7608490949579687196:2327], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2026-02-19T08:30:45.198773Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==, ActorId: [8:7608490940989752251:2327], ActorState: ExecuteState, LegacyTraceId: 01khtgbqhq6kpdpjdgfn1b3wwk, Create QueryResponse for error on request, msg: status# BAD_REQUEST issues# { message: "Tried to insert NULL value into NOT NULL column: Index1" issue_code: 2031 severity: 1 } trace_id# 2026-02-19T08:30:45.233858Z node 8 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-02-19T08:30:45.264259Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [8:7608490949579687225:2389], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2026-02-19T08:30:45.266134Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==, ActorId: [8:7608490940989752251:2327], ActorState: ExecuteState, LegacyTraceId: 01khtgbqzt54d45kj50deyaycp, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 13 } message: "At function: KiWriteTable!" end_position { row: 1 column: 13 } severity: 1 issues { position { row: 1 column: 13 } message: "Missing not null column in input: Index1. All not null columns should be initialized" end_position { row: 1 column: 13 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:45.296407Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [8:7608490949579687244:2397], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2026-02-19T08:30:45.297621Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==, ActorId: [8:7608490940989752251:2327], ActorState: ExecuteState, LegacyTraceId: 01khtgbr0y4j5nksv3hst9n24h, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 13 } message: "At function: KiWriteTable!" end_position { row: 1 column: 13 } severity: 1 issues { position { row: 1 column: 13 } message: "Missing not null column in input: Index1. All not null columns should be initialized" end_position { row: 1 column: 13 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:45.334337Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [8:7608490949579687263:2405], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2026-02-19T08:30:45.337532Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==, ActorId: [8:7608490940989752251:2327], ActorState: ExecuteState, LegacyTraceId: 01khtgbr21cn98ggy31m3x9e8a, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 14 } message: "At function: KiWriteTable!" end_position { row: 1 column: 14 } severity: 1 issues { position { row: 1 column: 14 } message: "Missing not null column in input: Index1. All not null columns should be initialized" end_position { row: 1 column: 14 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:45.821233Z node 8 :KQP_EXECUTER ERROR: {KQPLIT@kqp_literal_executer.cpp:121} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. TKqpLiteralExecuter, TKqpEnsure failed. trace_id# 2026-02-19T08:30:45.835140Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [8:7608490949579687282:2413], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2026-02-19T08:30:45.837962Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==, ActorId: [8:7608490940989752251:2327], ActorState: ExecuteState, LegacyTraceId: 01khtgbr353jvzkyncpfg5yzdd, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { message: "Tried to insert NULL value into NOT NULL column: Index1" issue_code: 2031 severity: 1 } } tx_id# trace_id# 2026-02-19T08:30:46.624647Z node 8 :KQP_EXECUTER ERROR: {KQPLIT@kqp_literal_executer.cpp:121} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. TKqpLiteralExecuter, TKqpEnsure failed. trace_id# 2026-02-19T08:30:46.630004Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [8:7608490949579687304:2423], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2026-02-19T08:30:46.630397Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==, ActorId: [8:7608490940989752251:2327], ActorState: ExecuteState, LegacyTraceId: 01khtgbrjy72gcg157cz5n45xv, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { message: "Tried to insert NULL value into NOT NULL column: Index1" issue_code: 2031 severity: 1 } } tx_id# trace_id# 2026-02-19T08:30:47.332587Z node 8 :KQP_EXECUTER ERROR: {KQPLIT@kqp_literal_executer.cpp:121} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. TKqpLiteralExecuter, TKqpEnsure failed. trace_id# 2026-02-19T08:30:47.338175Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [8:7608490953874654625:2434], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2026-02-19T08:30:47.339555Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=8&id=ZWI3NzdlZTQtNzZlNTgzMWEtYjdmOGUzNS1iZjU0YjU3OQ==, ActorId: [8:7608490940989752251:2327], ActorState: ExecuteState, LegacyTraceId: 01khtgbsbf7g9yzgafhfs2gpy7, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { message: "Tried to insert NULL value into NOT NULL column: Index1" issue_code: 2031 severity: 1 } } tx_id# trace_id# >> KqpAgg::AggWithLookup [GOOD] >> KqpAgg::AggWithSelfLookup >> KqpNotNullColumns::ReplaceNotNullPg [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] Test command err: 2026-02-19T08:29:36.895696Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490655715273805:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:36.895747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:37.087941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:37.094178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:37.094271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:37.160050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:37.164795Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:37.166214Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490655715273774:2081] 1771489776894317 != 1771489776894320 2026-02-19T08:29:37.259288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:37.259307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:37.259314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:37.259473Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:37.384281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:37.642814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:29:37.687468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.787992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.911501Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:37.919405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.987087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.403574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490668600177538:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.403718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.404092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490668600177548:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.404190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.728849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.757873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.784658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.810911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.836988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.869185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.900112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:39.944756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.009855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490672895145714:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.009943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.010050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490672895145719:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.010206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490672895145721:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.010261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.013422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:40.022410Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490672895145723:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:40.131047Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490672895145774:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:41.640862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.679038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:41.716408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__opera ... .cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:37.027310Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:37.090894Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:37.094424Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:37.094444Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:37.094453Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:37.094541Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:37.788868Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:37.802490Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.873749Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.880758Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:38.055131Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.138069Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.197142Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490932359874383:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.197246Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.197685Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490932359874392:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.197759Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.293959Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.344645Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.406504Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.454924Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.502467Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.547597Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.594726Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.681732Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.810912Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490932359875272:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.811039Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.811567Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490932359875277:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.811631Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490932359875278:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.811771Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.816027Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:41.829637Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490932359875281:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:41.874854Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490910885036072:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:41.874939Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:41.885273Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490932359875334:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:44.088743Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:44.134989Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:44.241910Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:56: Warning: At function: Filter, At lambda, At function: Coalesce
:7:29: Warning: At function: SqlIn
:7:29: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpNewEngine::JoinIdxLookup [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate >> KqpNewEngine::UpsertEmptyInput [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumn >> KqpNewEngine::MultiUsageInnerConnection [GOOD] >> KqpScan::YqlTableSample >> KqpUnion::ParallelUnionAll [GOOD] >> KqpSplit::AfterResolve+Descending >> KqpScan::EarlyFinish >> KqpReturning::ReturningUpsertAsTableListNotNullOnly+QueryService [GOOD] >> KqpReturning::ReturningUpsertAsTableListNotNullOnly-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultiUsageInnerConnection [GOOD] Test command err: 2026-02-19T08:29:51.090592Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490719448255360:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:51.091320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:51.256584Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:51.281071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:51.281169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:51.320830Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:51.325444Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490719448255317:2081] 1771489791088725 != 1771489791088728 2026-02-19T08:29:51.340724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:51.428608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:51.428628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:51.428640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:51.428723Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:51.460040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:51.783119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:51.824979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:51.947442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:52.073313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:52.110651Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:52.133840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.020290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490732333159077:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.020376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.020614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490732333159087:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.020668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.323409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.347115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.374703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.402793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.428897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.459343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.515444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.557660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.621614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490732333159955:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.621693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.621755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490732333159960:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.621891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490732333159962:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.621936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:54.625339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:54.635358Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490732333159964:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:54.734743Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490732333160015:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:56.090775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490719448255360:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:56.090846Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:29:57.555246Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490746179409016:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:57.555353Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... 2515]; 2026-02-19T08:30:37.470324Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:41.336181Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490933841082113:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:41.337270Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:41.351871Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:41.438953Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:41.440639Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490933841082062:2081] 1771489841333932 != 1771489841333935 2026-02-19T08:30:41.455612Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:41.455726Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:41.488261Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:41.570580Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:41.570613Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:41.570624Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:41.570732Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:41.626229Z node 8 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:42.247723Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:42.254429Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:42.261702Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.343664Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:42.353339Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.576285Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.665684Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.820812Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490951020953116:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.820908Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.821230Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490951020953126:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.821296Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.914056Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.952759Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.991884Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.029212Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.068910Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.108682Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.147723Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.211267Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.297892Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490955315921293:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.297996Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.298279Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490955315921298:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.298292Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490955315921299:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.298366Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.302703Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:46.314286Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490955315921302:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:46.335919Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490933841082113:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:46.336015Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:46.411667Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490955315921356:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScan::UnionWithPureExpr >> KqpNamedExpressions::NamedExpressionRandomUpsertReturning+UseSink-UseDataQuery [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertReturning-UseSink+UseDataQuery >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Date32_Limit4 >> KqpSplit::BorderKeys+Descending [GOOD] >> KqpSplit::BorderKeys+Unspecified >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpUnion::ParallelUnionAll [GOOD] Test command err: 2026-02-19T08:29:37.838552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490661217118543:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:37.839303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:38.032628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:38.057584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:38.057670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:38.102581Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:38.105438Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490661217118509:2081] 1771489777837295 != 1771489777837298 2026-02-19T08:29:38.117530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:38.185867Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:38.185897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:38.185911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:38.186044Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:38.187314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:38.530838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:38.577308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.673700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.783880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.834643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:38.843808Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:40.405019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674102022268:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.405165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.405453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674102022278:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.405504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.649395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.673826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.696950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.720244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.744623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.773593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.803103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.845837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.920538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674102023148:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.920660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.920757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674102023153:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.920819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674102023155:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.920881Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.923815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:40.932454Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490674102023157:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:41.014000Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490678396990504:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:42.216623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.247292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.276170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__opera ... X_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.530109Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.530125Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.539963Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.540037Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.540056Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.540075Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.540123Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.540138Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 
2026-02-19T08:30:45.550181Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.550257Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.550285Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.552331Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.552414Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.552433Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.559479Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.559550Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.559565Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.566623Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.566699Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.566717Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.567964Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.568024Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.568038Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.576449Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.576558Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.576576Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.576590Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.576662Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.576679Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.585650Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.585738Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.585757Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.586676Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.586731Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.586748Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-02-19T08:30:45.647152Z node 7 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490953655267738:3131], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.647286Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.647641Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490953655267743:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.647697Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490953655267744:3135], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.647888Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.653135Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:45.675448Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490953655267747:3136], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-02-19T08:30:45.749301Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490953655267800:5259] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:50.287227Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:30:50.287281Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded >> KqpSort::ReverseLimitOptimized [GOOD] >> KqpSort::ReverseEightShardOptimized >> KqpNewEngine::LiteralKeys [GOOD] >> KqpSplit::StreamLookupSplitAfterFirstResult [GOOD] >> KqpSplit::StreamLookupRetryAttemptForFinishedRead >> GroupWriteTest::WithRead [GOOD] >> KqpNewEngine::ShuffleWrite [GOOD] >> KqpNewEngine::SelfJoin >> KqpScan::Grep >> KqpRanges::ValidatePredicates [GOOD] >> KqpRanges::ValidatePredicatesDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 9472604011730831262 2026-02-19T08:30:47.010491Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-02-19T08:30:47.034475Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 received TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-02-19T08:30:47.034576Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-02-19T08:30:47.037256Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 received TEvBlockResult {Status# OK} 2026-02-19T08:30:47.051558Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:30:47.054596Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 received TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-02-19T08:30:54.434095Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet received PoisonPill, going to die 2026-02-19T08:30:54.434208Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-02-19T08:30:54.519258Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 received TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} >> KqpRanges::DuplicateKeyPredicateLiteral [GOOD] >> KqpRanges::DuplicateKeyPredicateParam >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] >> KqpKv::ReadRows_PgValue [GOOD] >> KqpKv::ReadRows_PgKey >> KqpRanges::UpdateMulti [GOOD] >> 
KqpRanges::ScanKeyPrefix >> KqpNotNullColumns::UpdateTable_UniqIndexPg [GOOD] >> KqpRanges::CastKeyBounds >> KqpNewEngine::StaleRO+EnableFollowers [GOOD] >> KqpNewEngine::StaleRO-EnableFollowers >> KqpNewEngine::ReadDifferentColumnsPk [GOOD] >> KqpNewEngine::ScalarMultiUsage |95.0%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Interval64_Limit3 >> KqpNewEngine::EmptyMapWithBroadcast [GOOD] >> KqpNewEngine::DqSourceCount |95.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LiteralKeys [GOOD] Test command err: 2026-02-19T08:29:54.129659Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490730857083309:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:54.129848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:54.300916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:54.325941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:54.326041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:54.375878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:54.400724Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:54.402353Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490730857083275:2081] 1771489794128310 != 1771489794128313 2026-02-19T08:29:54.450778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:54.450806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:54.450824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:54.450976Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:54.462496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:54.788657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:29:54.850785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:54.962101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:55.085083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:55.136560Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:55.149230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.264871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490743741987044:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.265053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.265672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490743741987054:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.265747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.626187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.654458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.681392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.706173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.734569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.826309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.853351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.893107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.952378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490743741987926:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.952445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.952701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490743741987931:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.952749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490743741987932:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.952815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:57.955992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:57.966512Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490743741987935:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:58.063553Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490748036955282:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:59.136577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490730857083309:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:59.142248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:00.261378Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490758989091909:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:00.261463Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... 292265Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490911416402268:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:41.292983Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:45.380353Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608490952308240882:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:45.380403Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:45.401534Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:45.520859Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:45.520974Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:45.523249Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:45.523689Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608490952308240841:2081] 1771489845379052 != 1771489845379055 2026-02-19T08:30:45.566453Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:45.581794Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:45.674432Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2026-02-19T08:30:45.674467Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:45.674486Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:45.674585Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:46.293484Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:46.322530Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.393400Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:46.397477Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.579916Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.709390Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:49.872554Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490969488111901:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:49.872680Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:49.873467Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490969488111910:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:49.873552Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:49.967427Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.019330Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.071504Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.137606Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.193110Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.241322Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.287794Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.348694Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.396650Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608490952308240882:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:50.402383Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:50.454110Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490973783080079:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.454218Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.454396Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490973783080084:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.454428Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608490973783080085:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.454483Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.459374Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:50.475358Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608490973783080088:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:50.555565Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608490973783080139:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpRanges::IsNotNullSecondComponent [GOOD] >> KqpRanges::IsNullInValue >> KqpScan::IsNull >> KqpNewEngine::IndexAutochooserAndLimitPushdown+AutoSelectIndex [GOOD] >> KqpNewEngine::IndexAutochooserAndLimitPushdown-AutoSelectIndex >> KqpScan::RightJoinSimple [GOOD] >> KqpScan::RightOnlyJoinSimple >> KqpScan::SecondaryIndex [GOOD] >> KqpScan::SecondaryIndexCustomColumnOrder >> KqpNamedExpressions::NamedExpressionRandom+UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandom-UseSink |95.0%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] Test command err: 2026-02-19T08:29:47.199416Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490700968848045:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:47.199474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:47.387844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:47.408777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:47.408885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:47.464741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:47.466454Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:47.467852Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490700968848014:2081] 1771489787197862 != 1771489787197865 2026-02-19T08:29:47.560488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:47.564673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:47.564693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:47.564705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:47.564815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:47.920408Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:47.976911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.096649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.208039Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:48.235024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.289548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:49.850692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490709558784491:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:49.850804Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:49.851179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490709558784501:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:49.851229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:50.144921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:50.169031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:50.195916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:50.219218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:50.245989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:50.278858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:50.338249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:50.379340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:50.444155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490713853752668:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:50.444286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:50.444367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490713853752673:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:50.444480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490713853752675:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:50.444511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:50.447429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:50.455904Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490713853752677:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:50.531250Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490713853752728:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:51.966131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:52.000174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:52.033885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__opera ... eration_create_table.cpp:689) 2026-02-19T08:30:45.137863Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490950553387254:2175];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:45.138221Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:45.152597Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:45.267099Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:45.269282Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490950553387108:2081] 1771489845129807 != 1771489845129810 2026-02-19T08:30:45.279218Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:45.279322Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:45.301740Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:45.381951Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:45.381973Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:45.381987Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:45.382072Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:45.449498Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:30:45.980375Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:46.000016Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.066416Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.193907Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:46.243328Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.369390Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.137809Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490950553387254:2175];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:50.139435Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:50.220039Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490972028225470:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.220219Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.224567Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490972028225481:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.224706Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.318871Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.366455Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.416905Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.460647Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.495360Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.534046Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.573571Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.631172Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.725427Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490972028226353:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.725524Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.725791Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490972028226359:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.725791Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490972028226358:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.725848Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.729649Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:50.742777Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490972028226362:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:50.830383Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490972028226413:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:53.055505Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSort::ImaginarySortReverse-InvertPkOverSort [GOOD] >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup >> KqpScan::TwoAggregatesTwoWindows >> KqpAgg::AggWithSelfLookup [GOOD] >> KqpAgg::AggWithSelfLookup2 >> KqpScan::YqlTableSample [GOOD] >> KqpSplit::AfterResolve+Ascending >> KqpSplit::AfterResolve+Descending [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate [GOOD] >> KqpNewEngine::DeleteON [GOOD] >> KqpNewEngine::DeleteByKey >> KqpSplit::AfterResolve+Unspecified >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] >> KqpRanges::UpdateWhereInFullScan-UseSink [GOOD] >> KqpNewEngine::ItemsLimit >> KqpRanges::Like [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumn [GOOD] >> KqpScan::UnionWithPureExpr [GOOD] >> KqpScan::UnionThree >> KqpNotNullColumns::AlterAddIndex >> KqpScan::CrossJoin >> KqpNewEngine::JoinWithPrecompute [GOOD] >> KqpSplit::BorderKeys+Unspecified [GOOD] >> KqpMergeCn::TopSortBy_Date32_Limit4 [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime64_Limit3 >> KqpNewEngine::JoinSameKey |95.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::Like [GOOD] Test command err: 2026-02-19T08:30:00.510241Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490759374600943:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:00.510306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:00.690881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:00.709211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:00.709297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:00.765611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:00.791348Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:00.793247Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490759374600904:2081] 1771489800508588 != 1771489800508591 2026-02-19T08:30:00.855614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:00.855633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:00.855645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:00.855777Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:00.958408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:01.279091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:01.326355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:01.445814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:01.544915Z 
node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:01.582058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:01.652707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:03.484721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490772259504671:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:03.484817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:03.485196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490772259504681:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:03.485233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:03.839885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:03.873091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:03.902528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:03.928799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:03.967970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:04.040307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:04.092754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:04.127090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:04.199940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490776554472849:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:04.200062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:04.200412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490776554472854:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:04.200461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490776554472855:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:04.200583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:04.204498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:04.216237Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490776554472858:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:04.276961Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490776554472909:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:05.510247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490759374600943:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:05.510320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:05.743354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:05.995092Z no ... 970937798680:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:49.682034Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:49.743490Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:49.853908Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490970937798636:2081] 1771489849675872 != 1771489849675875 2026-02-19T08:30:49.872320Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:49.874961Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:49.875059Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:49.888349Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:50.015315Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:50.028986Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:50.029016Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:50.029029Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:50.029134Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:50.690892Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-02-19T08:30:50.790589Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:50.795984Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:50.805557Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.896257Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:51.081044Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:51.241028Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.254990Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490992412636988:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.255092Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.255512Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490992412636998:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.255573Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.345351Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.385823Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.441571Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.488507Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.540111Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.588580Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.632039Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.681041Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490970937798680:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:54.683228Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:54.691468Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.805334Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490992412637876:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.805480Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.805831Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490992412637881:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.805895Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490992412637882:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.806061Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.809800Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:54.821534Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490992412637885:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:54.884993Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490992412637936:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:57.250629Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::UpdateWhereInFullScan-UseSink [GOOD] Test command err: 2026-02-19T08:29:56.265799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490743252825554:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:56.265842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:56.475882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:56.475975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:56.498315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:56.516724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:56.518878Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:56.520763Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490743252825522:2081] 1771489796264441 != 1771489796264444 2026-02-19T08:29:56.616003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:56.616039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:56.616067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:56.616211Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:56.781199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:56.975667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:57.019100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.135879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.275431Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:57.281572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.344529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.191217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490756137729301:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.191298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.191502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490756137729311:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.191543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.533116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.557224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.580951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.606936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.629945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.675823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.700175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.736684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.796819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490756137730181:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.796879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490756137730186:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.796889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.797049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490756137730188:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.797080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.799696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:59.808733Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490756137730189:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:59.899270Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490756137730241:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:01.265967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490743252825554:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:01.266026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:01.461074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:01.693345Z no ... 2026-02-19T08:30:48.920611Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:49.501591Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:49.514368Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:49.527877Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:49.603613Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:49.742109Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:49.800937Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:49.909855Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:52.970515Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490981992313225:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:52.970616Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:52.970957Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490981992313234:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:52.971007Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.072900Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.125126Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.160538Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.214998Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.262222Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.311761Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.362028Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.425630Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.528347Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490986287281402:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.528437Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.528837Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490986287281408:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.528841Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608490986287281407:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.528891Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.532820Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:53.545952Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608490986287281411:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:53.626018Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490986287281462:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:53.709855Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490964812442211:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:53.709958Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:55.922670Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.223414Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.534449Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.767044Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.328437Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Warning: Type annotation, code: 1030
:1:44: Warning: At lambda, At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:25:41.769577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:41.859926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:41.864555Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:456:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:25:41.864801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:25:41.864934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:25:42.241026Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:42.341096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:42.341216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:42.398924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:42.496071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:25:43.145932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:25:43.145975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:25:43.145997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:25:43.147861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:25:43.148170Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:25:43.209773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:25:43.750354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-02-19T08:25:47.077536Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:25:47.085646Z node 2 :STATISTICS INFO: service_impl.cpp:231: Subscribed for config changes on node 2 2026-02-19T08:25:47.089661Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:25:47.121814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-02-19T08:25:47.121943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.161147Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:25:47.163204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.356316Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:25:47.356404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:25:47.357461Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.358076Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.358766Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.359142Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.359308Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.359408Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.359553Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.359686Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.359762Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-02-19T08:25:47.374964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:25:47.575556Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:25:47.625973Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-02-19T08:25:47.626066Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-02-19T08:25:47.662101Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-02-19T08:25:47.663344Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-02-19T08:25:47.663568Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-02-19T08:25:47.663654Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-02-19T08:25:47.663731Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-02-19T08:25:47.663793Z node 2 :STATISTICS DEBUG: tx_init.cpp:226: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-02-19T08:25:47.663859Z node 2 :STATISTICS DEBUG: tx_init.cpp:267: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-02-19T08:25:47.663910Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:274: [72075186224037894] TTxInit::Complete 2026-02-19T08:25:47.665133Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-02-19T08:25:47.681621Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8545: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.681771Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8575: ConnectToSA(), pipe client id: [2:2135:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-02-19T08:25:47.709698Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2180:2628] 2026-02-19T08:25:47.711574Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2180:2628], schemeshard id = 72075186224037897 2026-02-19T08:25:47.774497Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2243:2656] 2026-02-19T08:25:47.778811Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-02-19T08:25:47.799220Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. Describe result: PathErrorUnknown 2026-02-19T08:25:47.799299Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. Creating table 2026-02-19T08:25:47.799396Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-02-19T08:25:47.811845Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2331:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:25:47.840268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:25:47.869241Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-02-19T08:25:47.869378Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. Subscribe on create table tx: 281474976720657 2026-02-19T08:25:47.880244Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. 
Subscribe on tx: 281474976720657 registered 2026-02-19T08:25:48.072484Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-02-19T08:25:48.099256Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-02-19T08:25:48.340630Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-02-19T08:25:48.460456Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:25:48.460552Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2258:2665] Owner: [2:2253:2664]. Column diff is empty, finishing 2026-02-19T08:25:49.007099Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:29:05.470803Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4643:4128]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:29:05.470997Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-02-19T08:29:05.471029Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 4, ReplyToActorId = [2:4643:4128], StatRequests.size() = 1 2026-02-19T08:29:05.599752Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4627:4114], ActorId: [2:4628:4115], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmE0YTc5ZjItMzIzZGU2NmYtMTk0YzI3MWItOTkzOWQ0ODA=, TxId: 2026-02-19T08:29:05.599847Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4627:4114], ActorId: [2:4628:4115], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmE0YTc5ZjItMzIzZGU2NmYtMTk0YzI3MWItOTkzOWQ0ODA=, TxId: 2026-02-19T08:29:05.600194Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4625:4112], ActorId: [2:4627:4114], Got response [2:4628:4115] SUCCESS 2026-02-19T08:29:05.600516Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:29:05.625614Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:29:05.625698Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId="operationId2", ActorId=[1:4382:4005] 2026-02-19T08:29:05.627000Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4661:4138] 2026-02-19T08:29:05.627760Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. 
ReplyToActorId [1:3160:3439] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-02-19T08:29:05.627813Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:40: [72075186224037894] TTxAnalyze::Execute. Reattach to existing force traversal. OperationId "operationId" , ReplyToActorId [1:3160:3439] 2026-02-19T08:29:05.627874Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:112: [72075186224037894] TTxAnalyze::Complete 2026-02-19T08:29:05.628042Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:654: [72075186224037894] ScheduleNextAnalyze 2026-02-19T08:29:05.628173Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:697: [72075186224037894] ScheduleNextAnalyze. operationId: "operationId", started analyzing table: [OwnerId: 72075186224037897, LocalPathId: 34], AnalyzeActorId: [2:4662:4139] 2026-02-19T08:29:05.628798Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4664:4141], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:29:05.632247Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4664:4141], RunStreamQuery with text: SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table` 2026-02-19T08:29:05.632387Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4664:4141], Start read next stream part 2026-02-19T08:29:05.672689Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4675:4151]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:29:05.673031Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-02-19T08:29:05.673083Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 5, ReplyToActorId = [2:4675:4151], StatRequests.size() = 1 2026-02-19T08:30:04.818140Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4664:4141], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:30:04.818393Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4664:4141], Start read next stream part 2026-02-19T08:30:04.818615Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtg8pq02x9e2kzndmqne9wp", SessionId: ydb://session/3?node_id=2&id=MWFkYThiNDctODhjNTc5MWMtZDZlNWYzNWUtNDQ4M2JiNjI=, Slow query, duration: 59.182980s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(`Key`),min(`Key`),max(`Key`),HLL(`Value`) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:30:04.820227Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4767:4235], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-02-19T08:30:04.823145Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4664:4141], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-02-19T08:30:04.823223Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4664:4141], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjQ3MTQ3MS01NzA5OWY2NS05ZjkxZTI2OS0xN2MyZjg2Ng==, TxId: 2026-02-19T08:30:04.823379Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32260, txId: 18446744073709551615] shutting down 2026-02-19T08:30:04.823913Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4767:4235], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table` 2026-02-19T08:30:04.824051Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4767:4235], Start read next stream part 2026-02-19T08:30:04.892698Z node 2 :STATISTICS DEBUG: service_impl.cpp:705: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:4785:4250]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-02-19T08:30:04.893115Z node 2 :STATISTICS DEBUG: service_impl.cpp:722: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-02-19T08:30:04.893172Z node 2 :STATISTICS DEBUG: service_impl.cpp:1224: ReplySuccess(), request id = 6, ReplyToActorId = [2:4785:4250], StatRequests.size() = 1 2026-02-19T08:30:57.067147Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4767:4235], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-02-19T08:30:57.067320Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4767:4235], Start read next stream part 2026-02-19T08:30:57.067538Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01khtgaggqe4srr05p30h6xxx4", SessionId: ydb://session/3?node_id=2&id=MTg4NjZmODMtYzBiMjZlNzMtN2FiNjIxOTktMWM2MzFhNzg=, Slow query, duration: 52.238797s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(`Value`,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-02-19T08:30:57.070354Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32260, txId: 18446744073709551615] shutting down 2026-02-19T08:30:57.070798Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4767:4235], StreamQueryResultPart #2 finished 
SUCCESS, Issues: 2026-02-19T08:30:57.070860Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4662:4139], ActorId: [2:4767:4235], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzUxNDQ3MTQtYjI1NjhlMDgtYjFiMDM5YWUtNDA3NzViODM=, TxId: 2026-02-19T08:30:57.071308Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4875:4334], ActorId: [2:4879:4337], Starting query actor #1 [2:4880:4338] 2026-02-19T08:30:57.071376Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4879:4337], ActorId: [2:4880:4338], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-02-19T08:30:57.076383Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4879:4337], ActorId: [2:4880:4338], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OWRkNTE0NTMtNzE4YzQ4NDctNjM3NmEwY2UtZGViYTdlOTQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-02-19T08:30:57.151456Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4879:4337], ActorId: [2:4880:4338], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWRkNTE0NTMtNzE4YzQ4NDctNjM3NmEwY2UtZGViYTdlOTQ=, TxId: 2026-02-19T08:30:57.151564Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4879:4337], ActorId: [2:4880:4338], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWRkNTE0NTMtNzE4YzQ4NDctNjM3NmEwY2UtZGViYTdlOTQ=, TxId: 2026-02-19T08:30:57.152099Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4875:4334], ActorId: [2:4879:4337], Got response [2:4880:4338] SUCCESS 2026-02-19T08:30:57.152484Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:33: [72075186224037894] TTxFinishTraversal::Execute 2026-02-19T08:30:57.188075Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:45: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-02-19T08:30:57.188175Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:59: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId="operationId", ActorId=[1:3160:3439] >> ExternalIndex::Simple [GOOD] >> KqpReturning::ReturningUpsertAsTableListNotNullOnly-QueryService [GOOD] >> KqpReturning::ReturningDeleteUpdate+UseSink >> KqpKv::ReadRows_PgKey [GOOD] >> KqpKv::ReadRows_Nulls >> KqpSort::ReverseEightShardOptimized [GOOD] >> KqpSort::PassLimit |95.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScan::Grep [GOOD] >> KqpScan::GrepByString >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Unspecified [GOOD] Test command err: 2026-02-19T08:30:46.768520Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490956444745076:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:46.768593Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:46.981007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:47.004959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:47.005039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:47.047845Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:47.051997Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490956444745045:2081] 1771489846767009 != 1771489846767012 2026-02-19T08:30:47.066814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:47.174927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:47.178842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:47.178868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:47.178880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:47.178968Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:47.584924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:47.591740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:47.660868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:30:47.775438Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:47.802810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:47.976869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:48.041925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.020086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490973624616119:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.020210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.020537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490973624616129:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.020586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.341818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.374625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.404821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.438311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.476496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.522826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.586837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.643380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.725039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490973624616995:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.725134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.725531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490973624617000:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.725606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490973624617001:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.725760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.729091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:50.738164Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490973624617004:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:50.842684Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490973624617057:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:51.768534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490956444745076:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:51.768608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 S ... t_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:53.901874Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:53.945904Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490985778033943:2081] 1771489853887197 != 1771489853887200 2026-02-19T08:30:53.966206Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:53.999789Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:53.999847Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:54.004639Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:54.058314Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:54.058340Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:54.058358Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:54.058433Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:54.091745Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:54.456475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:54.473135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.546698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.677574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.744506Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.912816Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:57.170269Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491002957905001:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.170349Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.170775Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491002957905011:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.170854Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.244352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.277821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.311383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.343578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.380152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.421524Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.459887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.510710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.594670Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491002957905880:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.594773Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.595026Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491002957905886:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.595102Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.595134Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491002957905885:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.598797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:57.614735Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491002957905889:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:57.713099Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491002957905940:3759] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:58.893821Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490985778034003:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:58.894544Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2026-02-19T08:30:59.955320Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489859426, txId: 281474976715673] shutting down >> KqpNewEngine::SelfJoin [GOOD] >> KqpNewEngine::SqlInFromCompact >> KqpRanges::DuplicateKeyPredicateParam [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:27:05.488151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:27:05.602210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:27:05.622529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:27:05.622611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2026-02-19T08:27:05.625414Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:27:05.625574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:27:05.875096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:27:05.875285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:27:05.950537Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489622890101 != 1771489622890105 2026-02-19T08:27:05.969970Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:27:06.016362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:27:06.121163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:27:06.609506Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:70:2117] Handle TEvGetProxyServicesRequest 2026-02-19T08:27:06.610053Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:70:2117] Handle TEvGetProxyServicesRequest 2026-02-19T08:27:06.614118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:27:06.615054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:27:06.615103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:27:06.615135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:27:06.615714Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:27:06.630898Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:27:06.779989Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:70:2117] Handle TEvProposeTransaction 2026-02-19T08:27:06.780059Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:70:2117] TxId# 281474976710657 ProcessProposeTransaction 2026-02-19T08:27:06.780153Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:70:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:913:2774] 2026-02-19T08:27:06.854497Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1834: Actor# [1:913:2774] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: 
"resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "new_column1" Type: "Uint64" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2026-02-19T08:27:06.854615Z node 1 :TX_PROXY DEBUG: schemereq.cpp:650: Actor# [1:913:2774] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-02-19T08:27:06.855305Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1899: Actor# [1:913:2774] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-02-19T08:27:06.855398Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1889: Actor# [1:913:2774] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-02-19T08:27:06.855763Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1641: Actor# [1:913:2774] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-02-19T08:27:06.855905Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:913:2774] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-02-19T08:27:06.856058Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:913:2774] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-02-19T08:27:06.859841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-02-19T08:27:06.860445Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1543: Actor# [1:913:2774] txid# 281474976710657 HANDLE EvClientConnected 2026-02-19T08:27:06.861151Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1565: Actor# [1:913:2774] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-02-19T08:27:06.861228Z node 1 :TX_PROXY DEBUG: schemereq.cpp:630: Actor# [1:913:2774] txid# 281474976710657 SEND to# [1:912:2773] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-02-19T08:27:06.948877Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:27:06.983040Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:27:06.983408Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2026-02-19T08:27:06.991299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:27:06.991592Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:27:06.991875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:27:06.992011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:27:06.992122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:27:06.992287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:27:06.992432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:27:06.992560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:27:06.992683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:27:06.992816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:27:06.992950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:27:06.993097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:27:06.993241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:27:07.018848Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 72075186224037888 2026-02-19T08:27:07.019101Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 
2026-02-19T08:27:07.019176Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-02-19T08:27:07.019396Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: ... ss permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2026-02-19T08:30:27.003342Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:27.003433Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711216 ProcessProposeKqpTransaction 2026-02-19T08:30:27.012443Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:27.012515Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711217 ProcessProposeKqpTransaction 2026-02-19T08:30:27.121240Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:27.121302Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711218 ProcessProposeKqpTransaction 2026-02-19T08:30:27.128439Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:27.128491Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711219 ProcessProposeKqpTransaction 2026-02-19T08:30:27.309577Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-02-19T08:30:27.309750Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:975:2822];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-02-19T08:30:27.309823Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:978:2825];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-02-19T08:30:27.309886Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:984:2829];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;EXPECTATION=0 2026-02-19T08:30:37.419788Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:10989:10328], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:30:37.423140Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=Y2M5ZjcyYjItNWQyNTA3YzktYTg5ZDg0N2EtYTc4ZjJlZjY=, ActorId: [1:10984:10324], ActorState: ExecuteState, LegacyTraceId: 01khtgbgah4ysj1skfvq40n942, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2026-02-19T08:30:37.900481Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:37.900567Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711220 ProcessProposeKqpTransaction 2026-02-19T08:30:37.910978Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:37.911051Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711221 ProcessProposeKqpTransaction 2026-02-19T08:30:38.038206Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:38.038280Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711222 ProcessProposeKqpTransaction 2026-02-19T08:30:38.046903Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:38.046973Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711223 ProcessProposeKqpTransaction 2026-02-19T08:30:38.231924Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-02-19T08:30:38.232091Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:975:2822];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-02-19T08:30:38.232165Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:978:2825];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-02-19T08:30:38.232291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:984:2829];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;EXPECTATION=0 2026-02-19T08:30:48.616968Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:48.617058Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711224 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;RESULT=;EXPECTATION=1 2026-02-19T08:30:49.187588Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:49.187657Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711225 ProcessProposeKqpTransaction 2026-02-19T08:30:49.197558Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:49.197627Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711226 ProcessProposeKqpTransaction 2026-02-19T08:30:49.337299Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:49.337380Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711227 
ProcessProposeKqpTransaction 2026-02-19T08:30:49.346938Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:49.347014Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711228 ProcessProposeKqpTransaction 2026-02-19T08:30:49.558062Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-02-19T08:30:49.558240Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:975:2822];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-02-19T08:30:49.558307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:978:2825];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-02-19T08:30:49.558367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:984:2829];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;EXPECTATION=1 2026-02-19T08:30:59.928227Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:30:59.928299Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711229 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 2026-02-19T08:31:00.419165Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:31:00.419225Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711230 ProcessProposeKqpTransaction 2026-02-19T08:31:00.429076Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:31:00.429155Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711231 ProcessProposeKqpTransaction 2026-02-19T08:31:00.554690Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:31:00.554765Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711232 ProcessProposeKqpTransaction 2026-02-19T08:31:00.563312Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:70:2117] Handle TEvExecuteKqpTransaction 2026-02-19T08:31:00.563381Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:70:2117] TxId# 281474976711233 ProcessProposeKqpTransaction 2026-02-19T08:31:00.750268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:971:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-02-19T08:31:00.750419Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:975:2822];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-02-19T08:31:00.750489Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:978:2825];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 
2026-02-19T08:31:00.750549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:984:2829];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 E0219 08:31:01.349828061 442439 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2026-02-19T08:31:01.349540774+00:00"} >> KqpScan::IsNull [GOOD] >> KqpScan::GrepRange >> KqpNewEngine::DqSourceCount [GOOD] >> KqpNewEngine::DqSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] Test command err: 2026-02-19T08:30:47.003467Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490959745486257:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:47.005885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:47.047251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:30:47.315101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:47.315222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:47.327203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:47.353304Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:47.403799Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490955450518921:2081] 1771489847000855 != 1771489847000858 2026-02-19T08:30:47.415091Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:47.466498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:47.466518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:47.466527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:47.466608Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:47.600053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:47.937790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:48.005252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:48.014529Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:48.155497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:48.289263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:48.368634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.275762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490972630389982:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.275909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.276336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490972630389992:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.276382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.545771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.580828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.610441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.640020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.670863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.708158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.772987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.819877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.916192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490972630390875:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.916275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.916702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490972630390880:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.916761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490972630390881:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.916869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.920968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:50.931843Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490972630390884:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:50.996061Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490972630390935:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:52.002888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490959745486257:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:52.002973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:52.417489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ty ... 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:30:54.890006Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:54.891805Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490990711409866:2081] 1771489854769445 != 1771489854769448 2026-02-19T08:30:54.900309Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:54.910923Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:54.911007Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:54.925362Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:54.977401Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:54.977424Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:54.977431Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:54.977507Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:55.053099Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:55.368409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:55.387423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.442544Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.616614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.676908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.800373Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:58.181866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491007891280927:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.181981Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.189875Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491007891280937:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.189984Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.248104Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.291017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.335367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.372368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.404978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.445001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.523524Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.588626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.681970Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491007891281809:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.682071Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.682190Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491007891281814:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.682302Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491007891281816:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.682340Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.685589Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:58.696782Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491007891281817:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:58.769016Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491007891281869:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:59.770882Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608490990711409892:2067];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:59.770941Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:00.226614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) captured evread ----------------------------------------------------------- 2026-02-19T08:31:01.193478Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489861232, txId: 281474976710676] shutting down >> KqpSplit::StreamLookupDeliveryProblem >> KqpScan::RightOnlyJoinSimple [GOOD] >> KqpScan::RightSemiJoinSimple >> KqpScan::StreamExecuteScanQueryCancelation >> KqpScan::TwoAggregatesOneFullFrameWindow >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query >> KqpSplit::AfterResolve+Ascending [GOOD] >> KqpRanges::ScanKeyPrefix [GOOD] >> KqpRanges::NoFullScanAtScanQuery >> KqpPointConsolidation::TasksCount >> KqpRanges::CastKeyBounds [GOOD] >> KqpSplit::AfterResolve+Unspecified [GOOD] >> KqpRanges::IsNullInValue [GOOD] >> KqpRanges::IsNullInJsonValue >> KqpScan::TwoAggregatesTwoWindows [GOOD] >> KqpScan::UdfFailure >> KqpSplit::UndeliveryOnFinishedRead >> KqpNewEngine::ScalarMultiUsage [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResolve+Ascending [GOOD] Test command err: 2026-02-19T08:30:51.614584Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490977444461440:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:51.616966Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:51.799515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:51.828430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:51.828536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:51.876804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected 2026-02-19T08:30:51.903357Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:51.904668Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490977444461409:2081] 1771489851612334 != 1771489851612337 2026-02-19T08:30:51.990884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:51.990909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:51.990923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:51.991025Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:52.012411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:52.441747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:52.501846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:52.622279Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:52.668113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:52.826258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:52.892866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.702528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490990329365174:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.702656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.703207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490990329365184:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:54.703265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.038019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.068350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.093568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.124699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.152481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.203789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.234580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.273938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.347977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490994624333349:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.348062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.348541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490994624333354:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.348574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490994624333355:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.348632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.352481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:55.362981Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490994624333358:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:55.431362Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490994624333409:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:56.614319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490977444461440:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:56.614391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:57.059912Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491003214268305:2540], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2026-02-19T08:30:57.060274Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?no ... ndelivered;self_id=[2:7608491005572655401:2272];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:58.040840Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:58.189140Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:58.189215Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:58.192233Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:58.204363Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491001277687857:2081] 1771489857984822 != 1771489857984825 2026-02-19T08:30:58.213213Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:58.222966Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:58.370356Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:58.370382Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:58.370390Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:58.370466Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:58.427768Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:58.779269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:58.810955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.875070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.028893Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:59.083020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.192091Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.410643Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491018457558920:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.410733Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.411258Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491018457558930:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.411307Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.472546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.511874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.590763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.619185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.649059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.692804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.732399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.781952Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.855604Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491018457559799:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.855705Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.855944Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491018457559804:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.855993Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491018457559805:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.856074Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.859418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:01.868393Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491018457559808:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:01.941017Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491018457559859:3759] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:03.041811Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491005572655401:2272];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:03.041894Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2026-02-19T08:31:03.673851Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489863661, txId: 281474976715673] shutting down >> KqpNewEngine::IndexAutochooserAndLimitPushdown-AutoSelectIndex [GOOD] >> KqpNewEngine::FullScanCount >> KqpSplit::AfterResultMultiRangeSegmentPartition+Unspecified [GOOD] >> KqpSplit::BorderKeys+Ascending >> KqpFlowControl::FlowControl_Unlimited >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup >> KqpMergeCn::TopSortBy_Interval64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Decimal_Limit5 >> KqpAgg::AggWithSelfLookup2 [GOOD] >> KqpAgg::AggWithHop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::CastKeyBounds [GOOD] Test command err: 2026-02-19T08:30:15.373673Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490822498413372:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:15.374978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:15.621403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:15.621522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:15.661967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:15.680758Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:15.683435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:15.683812Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490822498413340:2081] 1771489815368075 != 1771489815368078 
2026-02-19T08:30:15.802317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:15.802337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:15.802344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:15.802407Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:15.900986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:16.204970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:16.209675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:16.381244Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:18.373221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490835383316118:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.373311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.373578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490835383316128:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.373625Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.644806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:18.744795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490835383316224:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.744894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.745020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490835383316229:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.745301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490835383316231:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.745356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:18.749541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:18.760623Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490835383316232:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:30:18.873399Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490835383316284:2593] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:19.052655Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490839678283622:2355], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2026-02-19T08:30:19.053157Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MmFmOWFiOWYtNWM0Yzg0MjAtYTEyMTU1MTctMzExNDA0YTQ=, ActorId: [1:7608490835383316091:2325], ActorState: ExecuteState, LegacyTraceId: 01khtgaycqaq8m80j53vtn4yy9, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 13 } message: "At function: KiWriteTable!" end_position { row: 1 column: 13 } severity: 1 issues { position { row: 1 column: 13 } message: "Missing key column in input: Key for table: /Root/TestUpsertNotNullPk" end_position { row: 1 column: 13 } issue_code: 2029 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:19.081269Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490839678283631:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:30:19.082960Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MmFmOWFiOWYtNWM0Yzg0MjAtYTEyMTU1MTctMzExNDA0YTQ=, ActorId: [1:7608490835383316091:2325], ActorState: ExecuteState, LegacyTraceId: 01khtgaydq3wtkyk73h0zd9npa, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 13 } message: "At function: KiWriteTable!" end_position { row: 1 column: 13 } severity: 1 issues { position { row: 1 column: 47 } message: "Failed to convert type: Struct<\'Key\':Null,\'Value\':String> to Struct<\'Key\':Uint64,\'Value\':String?>" end_position { row: 1 column: 47 } severity: 1 issues { position { row: 1 column: 47 } message: "Failed to convert \'Key\': Null to Uint64" end_position { row: 1 column: 47 } severity: 1 } } issues { position { row: 1 column: 47 } message: "Failed to convert input columns types to scheme types" end_position { row: 1 column: 47 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:19.976411Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490840347682612:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:19.976538Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:19.998227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:20.083367Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:20.083454Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:20.083612Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:20.084180Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490840347682583:2081] 1771489819975318 != 1771489819975321 2026-02-19T08:30:20.114671Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:20.194363Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:20.194386Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:20.194394Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:20.194466Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... uteState, LegacyTraceId: 01khtgbzz0ee7rfwd03taeqjrn, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." 
issue_code: 2012 severity: 1 } trace_id# 2026-02-19T08:30:56.122912Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608491000787981780:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:56.123823Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:56.225835Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:56.303501Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:56.310373Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608491000787981712:2081] 1771489856109486 != 1771489856109489 2026-02-19T08:30:56.325276Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:56.325416Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:56.358642Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:56.513445Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:56.530378Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:56.530414Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:56.530426Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:56.530508Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:57.157584Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:57.280735Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:57.291727Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:57.308464Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.391319Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.567548Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.660063Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.607204Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491017967852767:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.607317Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.607618Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491017967852776:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.607712Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.709666Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.748661Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.785975Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.821225Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.866974Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.922706Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.970968Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.027452Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.114735Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608491000787981780:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:01.114832Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:01.123244Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491022262820942:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.123373Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.123395Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491022262820947:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.123635Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491022262820949:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.123768Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.127502Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:01.139862Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608491022262820950:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:01.210144Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608491022262821004:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResolve+Unspecified [GOOD] Test command err: 2026-02-19T08:30:52.154119Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490982263418648:2063];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:52.154181Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:52.384740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:52.384875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:52.412235Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:52.439110Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:52.441541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:52.442040Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490982263418625:2081] 1771489852153284 != 1771489852153287 2026-02-19T08:30:52.553443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:52.553494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:52.553509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:52.553589Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:52.670319Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:52.949259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:53.036204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.167037Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:53.180439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.348410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.416715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.219474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490995148322393:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.219619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.219979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490995148322403:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.220019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.537688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.567539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.600908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.632169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.666861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.704798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.765819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.810326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.893465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490995148323273:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.893549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.893887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490995148323278:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.893928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490995148323279:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.893963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.897366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:55.909744Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490995148323282:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:56.014239Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490999443290629:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:57.154176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490982263418648:2063];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:57.154232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2026-02-19T08:30:57.721230Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_m ... se.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:58.656201Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:58.729983Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:58.731230Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491009089305996:2081] 1771489858639240 != 1771489858639243 2026-02-19T08:30:58.761570Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:58.761627Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:58.763413Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:58.808071Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:58.808091Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:58.808100Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:58.808168Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:58.928498Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:59.229229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:59.235415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:59.251102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.314745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.501367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.568844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.695663Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:01.753056Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491021974209756:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.753116Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.753372Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491021974209765:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.753398Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.819897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.894134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.924902Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.970206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.000302Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.077297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.117063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.176588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.264806Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491026269177934:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.264901Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.265186Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491026269177939:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.265232Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491026269177940:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.265340Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.269412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:02.282567Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491026269177943:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:02.373939Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491026269177994:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:03.641820Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491009089306040:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:03.641888Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2026-02-19T08:31:04.050332Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489864004, txId: 281474976710673] shutting down >> KqpScan::UnionThree [GOOD] >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate >> KqpScan::SecondaryIndexCustomColumnOrder [GOOD] >> KqpScan::SelectExistsUnexpected >> KqpKv::ReadRows_Nulls [GOOD] >> KqpScan::GrepByString [GOOD] >> KqpScan::GrepLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionThree [GOOD] Test command err: 2026-02-19T08:30:52.963883Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490979957610569:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:52.963936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:53.204840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:53.239000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:53.239097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:53.293269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:53.332024Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:53.337631Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490979957610538:2081] 1771489852962376 != 1771489852962379 2026-02-19T08:30:53.444983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:53.445003Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:53.445009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:53.445085Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:53.499974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:53.925287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:53.940506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:53.972381Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:53.994771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.117292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.291816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.376593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.324444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490997137481608:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:56.324561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:56.325842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490997137481618:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:56.325900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:56.724069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.760965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.803458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.843694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.881033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.947951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.009588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.062214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.143830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491001432449789:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.143918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.146042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491001432449794:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.146109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491001432449795:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.146274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:57.150398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:57.163587Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491001432449798:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:57.268261Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491001432449849:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:57.964703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490979957610569:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:57.964767Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:58.940758Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489858934, txId: 281474976710673] shutting down 2026-02-1 ... 6-02-19T08:30:59.978288Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:00.062621Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:00.064817Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491012679493789:2081] 1771489859956155 != 1771489859956158 2026-02-19T08:31:00.099700Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:00.099780Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:00.103438Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:00.166237Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:00.166264Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:00.166273Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:00.166344Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:00.173988Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:00.597224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:00.615277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.674756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.812704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.871768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.993027Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:03.168882Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491029859364847:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.168956Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.169402Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491029859364856:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.169467Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.239765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.273627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.306373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.341627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.368956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.418183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.454251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.501482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.583404Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491029859365728:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.583477Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.583574Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491029859365733:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.583755Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491029859365735:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.583822Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.586939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:03.597867Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491029859365737:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:03.679407Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491029859365788:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:04.957842Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491012679493820:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:04.957903Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:05.695287Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7608491038449300789:2059], tablet: [2:7608491016974462093:2319], scanId: 3, table: /Root/EightShard 2026-02-19T08:31:05.695463Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7608491038449300795:2061], tablet: [2:7608491016974462092:2318], scanId: 4, table: /Root/EightShard 2026-02-19T08:31:05.697608Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489865726, txId: 281474976710673] shutting down 2026-02-19T08:31:05.697882Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7608491038449300799:2062], tablet: [2:7608491016974462091:2317], scanId: 5, table: /Root/EightShard >> KqpScan::DropRedundantSortByPk >> KqpNewEngine::DeleteByKey [GOOD] >> KqpSplit::StreamLookupJoinDeliveryProblemAfterFirstResult >> KqpScan::PrunePartitionsByLiteral ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Nulls [GOOD] Test command err: 2026-02-19T08:30:12.381156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490810957940086:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:12.381320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:12.650358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:12.650473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:12.693496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:12.712151Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:12.715169Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490810957940050:2081] 1771489812379307 != 1771489812379310 2026-02-19T08:30:12.725122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:12.814822Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:12.814863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:12.814874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:12.814961Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:12.849819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:13.219818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:13.231419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:13.388144Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:15.282145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490823842842832:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:15.282255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:15.282692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490823842842842:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:15.282757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:15.560213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) IsSuccess(): 1 GetStatus(): SUCCESS 2026-02-19T08:30:16.541651Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490828954054495:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:16.541724Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:16.583285Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:16.679473Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:16.684379Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490828954054446:2081] 1771489816540485 != 1771489816540488 2026-02-19T08:30:16.701141Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:16.701242Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:16.715579Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:16.795492Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:16.803324Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:16.803346Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:16.803353Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:16.803426Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:17.227766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:17.234557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:17.573837Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:19.725015Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490841838957223:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:19.725108Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:19.726743Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490841838957233:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:19.726815Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:19.748288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:19.796508Z node 2 :RPC_REQUEST ERROR: rpc_read_rows.cpp:813: TReadRowsRPC ReplyWithError: Unknown table '/Root/WrongTable' 2026-02-19T08:30:20.554927Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608490842685000741:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:20.554986Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:20.591386Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:20.666758Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:20.668235Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608490842685000696:2081] 1771489820552627 != 1771489820552630 2026-02-19T08:30:20.694668Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:20.694758Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:20.705876Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:20.794565Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:20.794582Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:20.794588Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:20.794649Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:20.839626Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:21.289562Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdoma ... 
519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037935 not found 2026-02-19T08:30:54.404109Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.498910Z node 6 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037936 not found 2026-02-19T08:30:54.505124Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.598365Z node 6 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037937 not found 2026-02-19T08:30:54.607182Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715758:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.729458Z node 6 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037938 not found 2026-02-19T08:30:54.734724Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715760:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:54.824645Z node 6 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037939 not found 2026-02-19T08:30:55.823643Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608490996692934730:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:55.830916Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:55.870668Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:55.967628Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:55.970592Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608490996692934688:2081] 1771489855793850 != 1771489855793853 2026-02-19T08:30:55.980865Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:55.980938Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:56.005690Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:56.047930Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:56.098333Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:56.098364Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:56.098374Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:56.098466Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:56.728896Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:56.742062Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:56.842560Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:00.356779Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.515455Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-02-19T08:31:00.527575Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.634735Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-02-19T08:31:00.635110Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.725580Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-02-19T08:31:00.823760Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608490996692934730:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:00.823836Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:01.803395Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608491018471943655:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:01.803462Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:01.822394Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:01.927363Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:01.930119Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608491018471943620:2081] 1771489861802362 != 1771489861802365 2026-02-19T08:31:01.943621Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:01.943747Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:01.997641Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:02.017740Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:02.054872Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:02.054896Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:02.054906Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:02.054989Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:02.619026Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:02.634105Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:02.809754Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:06.132596Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491039946780993:2330], DatabaseId: /Root, PoolId: default, Failed to fetch 
pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.132707Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.133099Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491039946781002:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.133179Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.160000Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) IsSuccess(): 1 GetStatus(): SUCCESS >> KqpNotNullColumns::AlterAddIndex [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime64_Limit3 [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 >> KqpReturning::ReturningDeleteUpdate+UseSink [GOOD] >> KqpReturning::ReturningDeleteUpdate-UseSink >> KqpScan::GrepRange [GOOD] >> KqpScan::GrepNonKeyColumns >> KqpSort::PassLimit [GOOD] >> KqpSort::OffsetPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteByKey [GOOD] Test command err: 2026-02-19T08:30:05.409031Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490781296681021:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:05.409120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:05.651762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:05.651952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:05.691400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:05.710302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:05.738099Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:05.739549Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490781296680986:2081] 1771489805407316 != 1771489805407319 2026-02-19T08:30:05.818499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:05.818567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:05.818578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:05.818683Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:05.901629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:06.243162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:06.260412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:06.302071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.416015Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:06.430132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.574377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.641653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.501006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490794181584758:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.501125Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.502286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490794181584768:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.502354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.825528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.857460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.885916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.916261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.946083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.978600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.042663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.078937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.158946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490798476552936:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.159049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.159393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490798476552941:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.159440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490798476552942:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.159496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.164077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:09.173878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490798476552945:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:09.246309Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490798476552996:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:10.409051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490781296681021:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:10.409130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:12.284484Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490809261219877:2070];send_to=[0:7307199536658146131:7762515 ... TADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608491006189887053:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:58.318779Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:58.338959Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:58.452037Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:58.452225Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:58.453251Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:58.457943Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608491006189887024:2081] 1771489858316570 != 1771489858316573 2026-02-19T08:30:58.480144Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:58.498567Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:58.614308Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:58.614332Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:58.614343Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:58.614440Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:59.096723Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:30:59.328759Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:59.336428Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:59.344358Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:59.354513Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.427631Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.622468Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.719247Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.906511Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491023369758091:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.906611Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.906891Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491023369758101:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.906932Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.996693Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.038490Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.084075Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.122249Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.171545Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.218270Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.259743Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.319124Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491006189887053:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:03.319195Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:03.324207Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.418489Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491027664726270:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.418620Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.418850Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491027664726275:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.418963Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491027664726276:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.419031Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.423111Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:03.438816Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491027664726279:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:03.521557Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491027664726331:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNewEngine::JoinSameKey [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate >> KqpScan::CountDistinct >> KqpSplit::UndeliveryOnFinishedRead [GOOD] >> KqpSplit::StreamLookupSplitBeforeReading >> KqpNewEngine::SqlInFromCompact [GOOD] >> KqpNewEngine::SqlInAsScalar ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] Test command err: 2026-02-19T08:30:07.830561Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490786384499411:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:07.833962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:08.059108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:08.076840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:08.076938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:08.137081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:08.144537Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:08.146225Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490786384499360:2081] 1771489807826933 != 1771489807826936 2026-02-19T08:30:08.239857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:08.239887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:08.239895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:08.240004Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:08.323357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:08.640305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:30:08.695641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.826982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.917270Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:08.975195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.046064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:10.859792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490799269403124:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:10.859907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:10.860318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490799269403134:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:10.860367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.206773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.233302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.305620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.335891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.367577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.401550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.456534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.500438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.583741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490803564371302:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.583849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.583917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490803564371307:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.584076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490803564371309:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.584141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.587369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:11.600242Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490803564371310:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:11.689956Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490803564371362:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:12.829625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490786384499411:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:12.829701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:14.298983Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490820470298482:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:14.299530Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... meshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:00.144121Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608491012221071902:2081] 1771489860004338 != 1771489860004341 2026-02-19T08:31:00.156627Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:00.156739Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:00.182960Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:00.254484Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:00.254518Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:00.254529Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:00.254642Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:00.310400Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:00.887375Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:00.900853Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:31:00.977035Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.103695Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:01.175151Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.261625Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.601887Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491033695910263:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.602030Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.602441Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491033695910273:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.602508Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.698352Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.749171Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.801257Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.854154Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.915942Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.986366Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.012091Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491016516039240:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:05.012268Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:05.047233Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.115324Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.228363Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491037990878442:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.228489Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.228763Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491037990878447:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.228809Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.228845Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491037990878448:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.234922Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:05.249194Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491037990878451:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:05.345183Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491037990878502:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:07.653123Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.754997Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.803348Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) >> KqpNewEngine::ItemsLimit [GOOD] >> KqpNewEngine::JoinProjectMulti >> KqpNewEngine::DqSource [GOOD] >> KqpNewEngine::DqSourceLiteralRange >> KqpScan::RightSemiJoinSimple [GOOD] >> KqpSplit::StreamLookupJoinSplitBeforeReading >> KqpScan::CrossJoin [GOOD] >> KqpScan::CrossJoinOneColumn >> KqpScan::TwoAggregatesOneFullFrameWindow [GOOD] >> KqpScan::TopSortOverSecondaryIndexRead >> KqpSplit::StreamLookupDeliveryProblem [GOOD] >> KqpSplit::StreamLookupJoinDeliveryProblem ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinSameKey [GOOD] Test command err: 2026-02-19T08:30:06.499371Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490782635702888:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:06.499677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:06.719833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:06.747129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:06.747248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:06.820445Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:06.825279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:06.919263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:06.919292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:06.919303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:06.919385Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:06.986858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:07.289890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:07.299240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:07.351811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:07.476730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:07.565479Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:07.610787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:07.670285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.446178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490795520606581:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.446315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.446848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490795520606591:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.446901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.763019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.790694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.820147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.852354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.882896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.919033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.953902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:10.003411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:10.091923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490799815574760:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:10.092022Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:10.092360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490799815574765:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:10.092418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490799815574766:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:10.092467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:10.095612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:10.106844Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490799815574769:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:10.210411Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490799815574820:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:11.497898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490782635702888:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:11.497952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:11.559310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.778843Z node 1 :READ_TAB ... de 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608490989748877806:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:00.629486Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608491016382333560:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:00.629583Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:00.668225Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:00.838274Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:00.862489Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:00.864924Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:00.865040Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:00.882045Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:00.953223Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:00.953249Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:00.953261Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2026-02-19T08:31:00.953375Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:01.432483Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:01.588572Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:01.607800Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.648996Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:01.680943Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.893208Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.989577Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.493571Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491037857171864:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.493688Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.494022Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491037857171873:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.494095Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.588598Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.628810Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491016382333560:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:05.628879Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:05.635121Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.679913Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.719621Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.754465Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.798303Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.856662Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.922347Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:06.028544Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491042152140045:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.028672Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.028980Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491042152140050:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.029036Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491042152140051:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.029169Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.033840Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:06.053018Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491042152140054:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:06.153061Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491042152140105:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::RightSemiJoinSimple [GOOD] Test command err: 2026-02-19T08:30:49.666282Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490970806254222:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:49.666342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:50.025906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:50.060950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:50.061283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:50.127626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:50.167724Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:50.172937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490970806254178:2081] 1771489849664656 != 1771489849664659 2026-02-19T08:30:50.273539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:50.322320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:50.322340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:50.322346Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:50.322425Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:50.682800Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:50.707013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:50.770926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.917253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:51.069962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:51.141811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.056114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490987986125240:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.056221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.057790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490987986125250:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.057853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.335862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.383830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.418292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.455553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.492133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.532092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.571977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.649244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.722476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490987986126120:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.722545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.722635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490987986126125:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.722911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490987986126127:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.722963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:53.726799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:53.739289Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490987986126128:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:53.801014Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490987986126180:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:54.666352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490970806254222:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:54.666427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:55.258742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.930120Z no ... ns;error=scheme_cache_undelivered_message; 2026-02-19T08:31:03.766782Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:03.901776Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:03.904364Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:03.904444Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:03.914665Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:03.917965Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491029789264419:2081] 1771489863753367 != 1771489863753370 2026-02-19T08:31:03.940853Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:03.992513Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:03.992535Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:03.992543Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:03.992613Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:04.091579Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:04.384037Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:04.401044Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.489594Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.637503Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.712593Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.872159Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:07.170949Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491046969135478:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.171044Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.171506Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491046969135488:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.171563Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.251898Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.286193Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.331153Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.374600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.430677Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.489842Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.531551Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.580577Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.672277Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491046969136354:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.672378Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.672797Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491046969136359:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.672878Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491046969136360:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.672983Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.677315Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:07.690907Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491046969136363:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:07.792554Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491046969136414:3759] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:08.755064Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491029789264452:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:08.755149Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:09.236868Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.064061Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489870094, txId: 281474976710675] shutting down >> KqpScan::StreamExecuteScanQueryCancelation [GOOD] >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce >> KqpScan::NullInKey >> KqpNewEngine::StaleRO-EnableFollowers [GOOD] >> KqpNewEngine::StaleRO_Immediate >> KqpSplit::BorderKeys+Ascending [GOOD] >> KqpPointConsolidation::TasksCount [GOOD] >> KqpPointConsolidation::ReadRanges >> KqpScan::JoinSimple >> KqpNamedExpressions::NamedExpressionRandomInsert-UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomDataQuery+UseSink >> KqpFlowControl::FlowControl_Unlimited [GOOD] >> KqpFlowControl::FlowControl_BigLimit >> KqpScan::EarlyFinish [GOOD] >> KqpScan::Effects >> KqpRanges::IsNullInJsonValue [GOOD] >> KqpRanges::IsNotNullInValue >> KqpSplit::AfterResult+Unspecified ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::BorderKeys+Ascending [GOOD] Test command err: 2026-02-19T08:30:58.864453Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491008795102448:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:58.864534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:59.061190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:59.102132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:59.102244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:59.149301Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:59.152095Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [1:7608491008795102267:2081] 1771489858830066 != 1771489858830069 2026-02-19T08:30:59.168152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:59.281471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:59.286483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:59.286512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:59.286525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:59.286646Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:59.659488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:59.666674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:59.710056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.832003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.943482Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:59.989756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.066287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.972201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491021680006050:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.972321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.972797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491021680006060:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.972850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.323160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.359871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.391318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.423019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.457668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.494346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.537360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.584796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:02.664985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491025974974229:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.665075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.665393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491025974974234:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.665429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.665441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491025974974235:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:02.669103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:02.679368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:31:02.679602Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491025974974238:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:02.772158Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491025974974289:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:03.869823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491008795102448:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:03.869887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------- ... cessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:06.374037Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:06.439348Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:06.465466Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:06.465562Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:06.476531Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:06.547105Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:06.547126Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:06.547134Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:06.547206Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:06.626779Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:06.972142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:06.982624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:06.999492Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.064049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.249418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.317412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.422207Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:09.387647Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491055624012867:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.387738Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.387959Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491055624012876:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.388035Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.456513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.489903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.527576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.559835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.596915Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.624641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.662068Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.710183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.786825Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491055624013747:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.786950Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.787219Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491055624013752:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.787271Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491055624013753:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.787389Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.791546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:09.800921Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491055624013756:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:09.872064Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491055624013807:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:11.342215Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491042739109173:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:11.342268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2026-02-19T08:31:11.975341Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489871557, txId: 281474976715673] shutting down >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup >> KqpAgg::AggWithHop [GOOD] >> KqpAgg::GroupByLimit >> KqpScan::GrepLimit [GOOD] >> KqpScan::AggregateCountStar >> KqpScan::SelectExistsUnexpected [GOOD] >> KqpScan::PrunePartitionsByLiteral [GOOD] >> KqpScan::PureExpr >> KqpSplit::StreamLookupJoinDeliveryProblemAfterFirstResult [GOOD] >> KqpSplit::StreamLookupJoinRetryAttemptForFinishedRead >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] >> KqpScan::DropRedundantSortByPk [GOOD] >> KqpScan::DqSourceFullScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepLimit [GOOD] Test command err: 2026-02-19T08:30:55.241337Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490993771519514:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:55.243681Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:55.485131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:55.500807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:55.500928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:55.552930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:55.583584Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:55.585764Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490993771519463:2081] 1771489855226944 != 1771489855226947 2026-02-19T08:30:55.699253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:55.720660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:55.720686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:55.720691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:55.721072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:56.195626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:56.246195Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:56.249908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:56.297943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.442220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.618102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:56.691649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.559136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491006656423231:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.559237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.559686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491006656423241:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.559738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:58.915970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.949356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.982025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.017824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.057319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.108063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.154915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.210007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.306726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491010951391405:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:59.306829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:59.307138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491010951391410:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:59.307193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491010951391411:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:59.307270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:59.310935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:59.321277Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491010951391414:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:59.404936Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491010951391465:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:00.230745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490993771519514:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:00.235633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:01.156004Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489861190, txId: 281474976710673] shutting down 2026-02-1 ... ing down 2026-02-19T08:31:08.242030Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491050461857745:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:08.246697Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:08.261861Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:08.353512Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:08.355114Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491050461857711:2081] 1771489868239740 != 1771489868239743 2026-02-19T08:31:08.370558Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:08.370638Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:08.394525Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:08.442255Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:08.442278Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:08.442285Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:08.442362Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:08.456530Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:08.845567Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:08.850633Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:08.856499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.901018Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.028047Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.083401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.249536Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:11.413056Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491063346761472:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.413126Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.413345Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491063346761482:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.413384Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.482676Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.522242Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.606925Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.647737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.685862Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.731303Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.769995Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.826877Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.915594Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491063346762355:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.915684Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.915977Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491063346762360:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.916022Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491063346762361:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.916106Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.919976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:11.934725Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491063346762364:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:12.005923Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491067641729711:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:13.242043Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491050461857745:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:13.242121Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:13.925881Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489873965, txId: 281474976710673] shutting down >> KqpNamedExpressions::NamedExpressionRandomUpsertIndex+UseSink-UseDataQuery [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertIndex-UseSink+UseDataQuery >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] >> KqpScan::GrepNonKeyColumns [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending >> KqpBatchDelete::ManyPartitions_2 [GOOD] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate [GOOD] >> KqpSort::Offset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::SelectExistsUnexpected [GOOD] Test command err: 2026-02-19T08:30:47.045984Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490961945754194:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:47.052634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:47.316438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:47.321137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:47.321268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:47.376395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:47.388617Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:47.409917Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490961945754157:2081] 1771489847042825 != 1771489847042828 2026-02-19T08:30:47.487534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:47.487557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:47.487562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-02-19T08:30:47.487633Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:47.520434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:47.937749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:47.945259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:48.003165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:48.065055Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:48.167073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:48.313382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:48.379488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.400011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490974830657926:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.400158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.407318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490974830657936:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.407442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:50.703988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.734446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.765988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.802189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.833500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.870609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.928554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:50.974259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:51.047726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490979125626103:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:51.047807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:51.048017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490979125626108:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:51.048021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490979125626109:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:51.048053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:51.051339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:51.064319Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490979125626112:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:51.167994Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490979125626165:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:52.045816Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490961945754194:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:52.045892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:52.675442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... 8:31:07.368802Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491046995255222:2081] 1771489867205636 != 1771489867205639 2026-02-19T08:31:07.381857Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:07.382388Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:07.382467Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:07.407297Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:07.494416Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:07.494441Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:07.494449Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:07.494524Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:07.601813Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:08.028068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:08.039332Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:08.050838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.116631Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.217864Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:08.253214Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.338842Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.891026Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491059880158986:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.891117Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.891549Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491059880158996:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.891597Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.960021Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.002321Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.039858Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.113521Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.152010Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.186605Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.231180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.283882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.378121Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491064175127167:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.378209Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.378490Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491064175127173:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.378513Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491064175127172:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.378572Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.382074Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:11.394384Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491064175127176:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:11.472048Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491064175127227:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:12.210123Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491046995255250:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:12.210207Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:13.084592Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.941229Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489873671, txId: 281474976710675] shutting down 2026-02-19T08:31:14.461252Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489874196, txId: 281474976710678] shutting down >> KqpNewEngine::FullScanCount [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] Test command err: 2026-02-19T08:30:02.592993Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490767083852951:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:02.593072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:02.783627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:02.812540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:02.812643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:02.870561Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490767083852917:2081] 1771489802591383 != 1771489802591386 2026-02-19T08:30:02.875874Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:02.886100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:03.007772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-02-19T08:30:03.007790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:03.007802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:03.007922Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:03.061204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:03.404289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:03.462942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:03.593150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:03.612487Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:03.748225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:03.821387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:05.653169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490779968756682:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:05.653269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:05.653562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490779968756692:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:05.653595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:06.026976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.057135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.087649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.118089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.149672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.186811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.253590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.298884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.385493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490784263724856:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:06.385566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:06.385823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490784263724861:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:06.385823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490784263724862:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:06.385857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:06.389383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:06.400436Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490784263724865:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:06.455068Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490784263724916:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:07.593040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490767083852951:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:07.593117Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:09.106105Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490797358922363:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:09.106169Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... pers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608491022405184455:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:01.725476Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608491022405184506:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:05.987991Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608491036328598067:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:05.988460Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:06.027962Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:06.192537Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:06.208304Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:06.208403Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:06.238837Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:06.270952Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:06.320500Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:06.320529Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:06.320544Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:06.320648Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:06.995576Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:07.099881Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:07.127004Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.222591Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.453802Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.592628Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.938317Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491057803436361:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.938436Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.938767Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491057803436370:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.938826Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.989887Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491036328598067:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:10.989986Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:11.035863Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.082782Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.130556Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.177156Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.221603Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.290216Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.337277Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.398656Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.516659Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062098404541:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.516769Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.517207Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062098404546:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.517264Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062098404547:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.517435Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.524157Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:11.541148Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491062098404550:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:11.626310Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491062098404601:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> ColumnShardTiers::DSConfigsStub [GOOD] >> KqpScan::CountDistinct [GOOD] >> KqpScan::BoolFlag >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] >> KqpSplit::StreamLookupJoinSplitBeforeReading [GOOD] >> KqpSplit::StreamLookupJoinSplitAfterFirstResult >> KqpSplit::StreamLookupJoinDeliveryProblem [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepNonKeyColumns [GOOD] Test command err: 2026-02-19T08:30:56.486189Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490998027233886:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:56.486253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:56.851633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:56.855844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:56.855948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:56.932580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:56.945286Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:56.947949Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490998027233843:2081] 1771489856484383 != 1771489856484386 2026-02-19T08:30:57.106365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:57.116255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:57.116278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:57.116284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:57.116411Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:57.503086Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:57.577621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:57.584576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:57.650774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.796846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:57.981537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:58.070096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.891939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491010912137606:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:59.892072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:59.892376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491010912137616:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:59.892435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.190136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.227746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.260883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.291724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.333169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.369661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.416890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.487140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:00.559589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491015207105787:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.559715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.559944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491015207105792:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.559977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491015207105793:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.560015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:00.563263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:00.572909Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491015207105796:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:00.651411Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491015207105847:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:01.486345Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490998027233886:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:01.486434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:02.102670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... ing down 2026-02-19T08:31:09.876510Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491056054123225:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:09.876572Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:09.893422Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:09.992363Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:09.997821Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491056054123186:2081] 1771489869875135 != 1771489869875138 2026-02-19T08:31:10.005921Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:10.006007Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:10.025345Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:10.088355Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:10.088379Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:10.088389Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:10.088460Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:10.129329Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:10.441509Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:10.447885Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:10.459717Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.513384Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.640642Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.706980Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.888922Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:13.160649Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491073233994237:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.160738Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.160977Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491073233994247:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.161017Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.219455Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.252764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.290712Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.336911Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.368390Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.412184Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.460140Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.506259Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.594510Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491073233995116:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.594586Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.594835Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491073233995121:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.594872Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491073233995122:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.594963Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.599200Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:13.609632Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491073233995125:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:13.662519Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491073233995176:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:14.878028Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491056054123225:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:14.878109Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:15.700639Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489875736, txId: 281474976715673] shutting down >> KqpScan::UdfFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] Test command err: 2026-02-19T08:30:08.325754Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490792746896460:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:08.328238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:08.518886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:08.533030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:08.533153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:08.597795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:08.600064Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:08.603344Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490792746896408:2081] 1771489808323648 != 1771489808323651 2026-02-19T08:30:08.689116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:08.689140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:08.689168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:08.689260Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:08.714653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:09.112563Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:09.178177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.320600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.335492Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:09.490802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.561613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.315885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490805631800167:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.315989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.317142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490805631800177:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.317186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:11.660692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.688989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.713864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.738300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.767498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.823500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.857141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:11.929262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:12.005460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490809926768354:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.005559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.005699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490809926768359:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.005776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490809926768361:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.005834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:12.009155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:12.020165Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490809926768363:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:12.091219Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490809926768414:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:13.326028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490792746896460:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:13.326107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:13.602994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:14.715414Z no ... 4976710675] shutting down 2026-02-19T08:31:06.548661Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608491040742735456:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:06.548731Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:06.597807Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:06.766276Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:06.770770Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:06.770885Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:06.786391Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:06.884505Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:06.928308Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:06.928336Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:06.928347Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:06.928448Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:07.553033Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:07.647169Z 
node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:07.667877Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.816760Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.006855Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.103088Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.186726Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062217573766:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.186845Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.187385Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062217573775:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.187472Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.289974Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.344610Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.405356Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.448050Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.493184Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.548055Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.549879Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491040742735456:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:11.549953Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:11.608494Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.671997Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.828067Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062217574648:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.828248Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.830073Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062217574653:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.830147Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062217574654:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.830353Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.843332Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:11.861311Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491062217574657:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:11.939314Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491062217574708:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:14.168175Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.437817Z node 8 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489875470, txId: 281474976710675] shutting down >> KqpReturning::ReturningDeleteUpdate-UseSink [GOOD] >> KqpSort::OffsetPk [GOOD] >> KqpSort::OffsetTopSort >> KqpScan::DecimalColumn >> KqpNewEngine::DqSourceLiteralRange [GOOD] >> KqpNewEngine::DqSourceLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:29:32.189547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:29:32.260372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:29:32.274519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:29:32.274592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:29:32.274975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:29:32.518387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:32.518508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:32.573350Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489770177437 != 1771489770177441 2026-02-19T08:29:32.579358Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:32.619816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:32.702520Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:29:33.148040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:33.148105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:33.148139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:33.150726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:33.151479Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:33.165998Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:33.305620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-02-19T08:29:33.442746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:29:33.443047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:29:33.443304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:29:33.443424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:29:33.443523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:29:33.443639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:29:33.443782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:29:33.443924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:29:33.444041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:29:33.444159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:29:33.444301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:29:33.444434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:29:33.444590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:29:33.468944Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:140;event=start_subscribing_metadata; 2026-02-19T08:29:33.511490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:29:33.511611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:29:33.511852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:29:33.511992Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:29:33.512102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:29:33.512234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:29:33.512343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:29:33.512480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:29:33.512600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:29:33.512733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:29:33.512841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:29:33.512942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:29:33.513073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:29:33.516439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:29:33.516552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:29:33.516686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:29:33.516732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2026-02-19T08:29:33.516991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:29:33.517041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:04.267295Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:04.267331Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2026-02-19T08:31:04.267361Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:04.267387Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2026-02-19T08:31:04.267417Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2026-02-19T08:31:04.267452Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037888 2026-02-19T08:31:04.267497Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:04.267528Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:04.267552Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2026-02-19T08:31:04.267577Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:04.267599Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2026-02-19T08:31:04.267623Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2026-02-19T08:31:04.267653Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037889 2026-02-19T08:31:04.267704Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:04.267858Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:04.267888Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2026-02-19T08:31:04.267915Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:04.267937Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier 
'/Root/tier2' at tablet 72075186224037890 2026-02-19T08:31:04.267964Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2026-02-19T08:31:04.267995Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037890 2026-02-19T08:31:04.268030Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:04.268977Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-02-19T08:31:04.269078Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-02-19T08:31:04.269150Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:969:2820];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2026-02-19T08:31:15.601052Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-02-19T08:31:15.601531Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-02-19T08:31:15.601592Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-02-19T08:31:15.601644Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:15.601680Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:15.601788Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:15.601904Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-02-19T08:31:15.601948Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-02-19T08:31:15.601984Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-02-19T08:31:15.602413Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-02-19T08:31:15.602460Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2026-02-19T08:31:15.602493Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:15.602520Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:15.602566Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:15.602601Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-02-19T08:31:15.602629Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2026-02-19T08:31:15.602657Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:15.602683Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:15.602721Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:15.602753Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-02-19T08:31:15.602779Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2026-02-19T08:31:15.602806Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:15.602833Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:15.602869Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:15.603117Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-02-19T08:31:15.603215Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-02-19T08:31:15.603288Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:969:2820];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 2026-02-19T08:31:16.491173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:31:16.491255Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::FullScanCount [GOOD] Test command err: 2026-02-19T08:30:05.981752Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490777850635950:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:05.982306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:06.217833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:06.246747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:06.246858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:06.301779Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490777850635872:2081] 1771489805964620 != 1771489805964623 2026-02-19T08:30:06.312643Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:06.318194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:06.399283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:06.424258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:06.424296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:06.424306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:06.424406Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:06.843400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:06.893459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:06.981588Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:07.017789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:07.144361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:07.195918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:08.901807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490790735539634:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.901922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.902270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490790735539644:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:08.902320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.237237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.270079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.306245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.338073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.369508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.433547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.474024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.519902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:09.593830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490795030507812:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.593906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.593991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490795030507817:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.594107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490795030507819:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.594154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:09.597103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:09.607416Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490795030507821:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:09.698315Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490795030507872:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:10.972251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490777850635950:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:10.974185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:12.277777Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490810634696740:2186];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:12.277910Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... 1771489866188229 2026-02-19T08:31:06.364521Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:06.364609Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:06.433842Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:06.445434Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:06.515519Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:06.515551Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:06.515563Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:06.515650Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:06.808721Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:07.205600Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:07.219805Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:07.230023Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:07.245694Z 
node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.329423Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.555532Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.666923Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.064587Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062724187445:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.064694Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.065151Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062724187455:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.065208Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.159983Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.190482Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491041249349149:2098];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:11.190579Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:11.207156Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.285614Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.330650Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.372311Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.416618Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.461602Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.537009Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.698084Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062724188334:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.698195Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.698507Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062724188339:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.698545Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491062724188340:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.698613Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.703207Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:11.715179Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491062724188343:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:11.804201Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491062724188394:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At lambda, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
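The warning above (code 1108) is the YQL type annotator noting that IN over a collection containing NULL may not behave as a plain match/no-match check, and it suggests opting into ANSI semantics via the named pragma. A minimal sketch of what that looks like in a query; the table "Example" and column "Key" are hypothetical placeholders, not taken from the test output above:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

SELECT *
FROM Example
WHERE Key IN (1, 2, NULL);  -- with the pragma enabled, IN over a collection containing NULL
                            -- follows ANSI SQL semantics, as the warning recommends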
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At lambda, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpNewEngine::JoinProjectMulti [GOOD] >> KqpNewEngine::JoinMultiConsumer >> KqpScan::AggregateNoColumn >> KqpScan::NullInKey [GOOD] >> KqpScan::NullInKeySuffix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:31:08.004525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:08.122035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:31:08.140788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:31:08.140871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:31:08.141417Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:31:08.440133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:08.440264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:08.518293Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489865473482 != 1771489865473486 2026-02-19T08:31:08.527397Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:08.580014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:08.679499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:31:08.955449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:08.969799Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:09.271678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:869:2751], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.271791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.272065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:887:2756], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.272112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.282448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.547604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:971:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.547739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.548079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:975:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.548138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.548211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:977:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.552722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:09.681421Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:980:2829], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:31:09.758721Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1036:2866] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:09.903414Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:152;event=channel_info;ch_size=8388608;ch_count=1;ch_limit=8388608;inputs=0;input_channels_count=0; 2026-02-19T08:31:09.903844Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:144: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY5MTEtODc4OWRlMjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Start compute actor [1:1063:2818], task: 1 2026-02-19T08:31:09.903900Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:155: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY5MTEtODc4OWRlMjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Set execution timeout 299.429638s 2026-02-19T08:31:09.909172Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY5MTEtODc4OWRlMjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\032\n\n/Root/Test\020\200\202\224\204\200\200\200\200\001\030&(\001\"\t\n\003Key\020\001 \004*\t\n\003Key\020\001 \004*\014\n\005Value\020\002 \201 0\214\247\200\200\200\200@8\001@\000H\000R\022\t#\004\000\000\000\000\000\000\021\002\013\000\000\001\000\000\000X\000`\000h\000h\001x\000\262\001\000" } } } 2026-02-19T08:31:09.909541Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY5MTEtODc4OWRlMjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:31:09.913221Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY5MTEtODc4OWRlMjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2026-02-19T08:31:09.913443Z node 1 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:413: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . 
RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY5MTEtODc4OWRlMjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2026-02-19T08:31:09.913593Z node 1 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5363: TxId: 281474976715660, task: 1. Add data: 72 / 72 2026-02-19T08:31:09.913682Z node 1 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5331: TxId: 281474976715660, task: 1. Send data=72, closed=1, bufferActorId=[1:1059:2818] 2026-02-19T08:31:09.913764Z node 1 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY5MTEtODc4OWRlMjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 72 2026-02-19T08:31:09.913808Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2203: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY5MTEtODc4OWRlMjY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-02-19T08:31:09.913915Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:1063:2818], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcfqb1w39pag9nxe9esv0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWE4ODQ5NzMtOWVkOTlkMDEtOWE0OTY ... 
t_executions 2026-02-19T08:31:11.001926Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:11.002052Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:11.006975Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491059928223812:2081] 1771489870820540 != 1771489870820543 2026-02-19T08:31:11.016010Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:11.017111Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:11.117440Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:11.155511Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:11.155540Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:11.155548Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:11.155655Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:11.723397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:11.729791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:11.744378Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.833830Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:11.837913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.004840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.118859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.046501Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077108094870:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.046596Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.046891Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077108094880:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.046935Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.158508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.209536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.239335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.268352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.295940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.325075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.358845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.411459Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.494638Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077108095752:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.494693Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.494966Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077108095756:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.494999Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.495185Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077108095759:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.499814Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:14.515015Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491077108095761:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:14.572249Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491077108095812:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:15.823799Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491059928223868:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:15.823855Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:16.123555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710678 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2026-02-19T08:31:17.245070Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489877220, txId: 281474976710676] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupJoinDeliveryProblem [GOOD] Test command err: 2026-02-19T08:31:03.719930Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491028106471076:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:03.719978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:04.036174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:04.036312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:04.067141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:04.073797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:04.104287Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491028106471045:2081] 1771489863718639 != 1771489863718642 2026-02-19T08:31:04.112476Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:04.216497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-02-19T08:31:04.216521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:04.216527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:04.216631Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:04.294561Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:04.633113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:04.712456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.737844Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:04.876966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.054417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.131521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:06.911845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491040991374814:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.911971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.912279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491040991374824:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.912328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.313176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.351590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.390034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.434969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.470291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.515313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.591185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.651682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.738354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491045286342997:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.738429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.738781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491045286343003:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.738815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491045286343002:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.738823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.742255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:07.752325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491045286343006:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:07.821799Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491045286343057:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:08.720099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491028106471076:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:08.720161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:09.420491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) captured evread -------------- ... :11.505527Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:11.516212Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:11.610887Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:11.610964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:11.613966Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491064574966494:2081] 1771489871462265 != 1771489871462268 2026-02-19T08:31:11.625889Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:11.636745Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:11.708763Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:11.708779Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:11.708787Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:11.708845Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:11.738034Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:12.102720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:12.107643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:12.117287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.169570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.290477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.352927Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.512381Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:14.661866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077459870257:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.661979Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.666570Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077459870267:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.666668Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.732245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.766122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.835749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.875083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.918735Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.964256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.013982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.067776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.163364Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491081754838434:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.163462Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.163855Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491081754838439:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.163897Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491081754838440:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.163970Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.168063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:15.183674Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491081754838443:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:15.276276Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491081754838494:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:16.471312Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491064574966600:2144];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:16.471386Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:16.860229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) captured evread ----------------------------------------------------------- ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::UdfFailure [GOOD] Test command err: 2026-02-19T08:30:57.920228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491002581541689:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:57.921747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:58.219841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:58.219924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:58.245970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:58.264585Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:58.266018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:58.268751Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491002581541638:2081] 1771489857910328 != 1771489857910331 2026-02-19T08:30:58.450034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:58.454285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:58.454311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:58.454317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-02-19T08:30:58.454391Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:58.903100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:58.930049Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:59.017010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.192048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.351859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:59.420079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.176248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491019761412694:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.176374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.176775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491019761412704:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.176819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.451852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.487645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.525060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.558680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.589781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.623945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.660093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.763486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.840726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491019761413582:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.840812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.841124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491019761413587:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.841164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491019761413588:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.841277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:01.845627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:01.858976Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491019761413591:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:01.931185Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491019761413642:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:02.913826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491002581541689:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:02.913903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:04.252164Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489864263, txId: 281474976710673] shutting down 2026-02-19T08:31:05.244012Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491036434134981:2071];send_to=[0:730719953 ... VE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:11.932922Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:12.018491Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:12.018515Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:12.018522Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:12.018594Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:12.101848Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:12.502533Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:12.513685Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:12.526194Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.591835Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.749116Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.814215Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:12.820800Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.009860Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491082526989788:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.009983Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.014347Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491082526989798:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.014453Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.087007Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.130549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.183964Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.214430Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.241916Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.313954Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.355462Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.407037Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.488991Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491082526990675:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.489098Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.489527Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491082526990680:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.489572Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491082526990681:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.489684Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.492912Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:15.504713Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491082526990684:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:15.560250Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491082526990735:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:17.390645Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [3:7608491091116925654:2543], TxId: 281474976715674, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgcq6e0feq3pzbe11hy5s9. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NjIzMjZhNDQtOTY5YTVhYWEtMjMwNzQzYTgtMWVmOWE2NDg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2026-02-19T08:31:17.391371Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=NjIzMjZhNDQtOTY5YTVhYWEtMjMwNzQzYTgtMWVmOWE2NDg=, ActorId: [3:7608491091116925625:2536], ActorState: ExecuteState, LegacyTraceId: 01khtgcq6e0feq3pzbe11hy5s9, Create QueryResponse for error on request, msg: status# INTERNAL_ERROR issues# { message: "Terminate was called, reason(17): Bad filter value.\n" severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-02-19T08:31:17.391719Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489877430, txId: 281474976715673] shutting down 2026-02-19T08:31:17.391844Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [3:7608491091116925655:2544], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgcq6e0feq3pzbe11hy5s9. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NjIzMjZhNDQtOTY5YTVhYWEtMjMwNzQzYTgtMWVmOWE2NDg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. >> KqpScan::JoinSimple [GOOD] >> KqpScan::Join4 >> KqpScan::TopSortOverSecondaryIndexRead [GOOD] >> KqpScan::IsNullPartial >> KqpNewEngine::StaleRO_Immediate [GOOD] >> KqpNewEngine::StaleRO_IndexFollowers+EnableFollowers >> KqpScan::Effects [GOOD] >> KqpScan::EmptySet_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD] Test command err: 2026-02-19T08:24:16.585160Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608489281870365893:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:24:16.585229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:24:16.782133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:24:16.800838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:24:16.800919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:24:16.870326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:24:16.872156Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:24:16.882564Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608489281870365863:2081] 1771489456583647 != 1771489456583650 2026-02-19T08:24:16.950512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:24:16.950549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:24:16.950562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:24:16.950708Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:24:17.002390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:24:19.339167Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/ynks/0006d1/r3tmp/spilling-tmp-runner/node_1_29137c69-5862f78c-c2f7a618-be0be5f2, actor: [1:7608489294755269587:2384] 2026-02-19T08:24:19.339349Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/ynks/0006d1/r3tmp/spilling-tmp-runner 2026-02-19T08:24:21.760397Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760462Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,1], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760498Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,2], 
InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760530Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,3], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760562Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,4], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760609Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,5], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760662Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,6], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760710Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,7], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760772Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,8], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760814Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,9], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760852Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:460: StageInfo: StageId #[0,10], InputsCount: 0, OutputsCount: 1 2026-02-19T08:24:21.760929Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01khtfzzxz7jnf3pen33dgyp85, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjRjNTZiMjctNjMyZDhhNjUtNTYwOWMyNTQtYzJhMDk0YTM=, PoolId: default, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-02-19T08:24:21.761174Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:837} ActorId: [1:7608489303345205267:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzzxz7jnf3pen33dgyp85, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjRjNTZiMjctNjMyZDhhNjUtNTYwOWMyNTQtYzJhMDk0YTM=, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 11 trace_id# 2026-02-19T08:24:21.761218Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:852} ActorId: [1:7608489303345205267:2400] TxId: 281474976710672. Ctx: { TraceId: 01khtfzzxz7jnf3pen33dgyp85, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjRjNTZiMjctNjMyZDhhNjUtNTYwOWMyNTQtYzJhMDk0YTM=, PoolId: default, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-02-19T08:24:21.761629Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710672. Resolved key sets: 11 2026-02-19T08:24:21.761810Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.761873Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 44] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.761954Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 42] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.762019Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 8 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.762089Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 45] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.762174Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 43] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (String : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.762236Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Int32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.762294Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 40] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL, Int64 : NULL, Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.762362Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL, Uint32 : NULL, Int64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-02-19T08:24:21.762426Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 47] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Grou ... EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [32:7608491080518441303:2949], CA [32:7608491080518441304:2950], CA [32:7608491080518441301:2947], CA [32:7608491080518441305:2951], CA [32:7608491080518441302:2948], trace_id# 2026-02-19T08:31:15.293664Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [32:7608491080518441302:2948] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1287 Tasks { TaskId: 2 CpuTimeUs: 340 FinishTimeMs: 1771489875287 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 69 ReadBytes: 552 AffectedPartitions: 13 } IngressRows: 69 ComputeCpuTimeUs: 263 BuildCpuTimeUs: 77 HostName: "ghrun-fkrokuvqke" NodeId: 32 StartTimeMs: 1771489875287 CreateTimeMs: 1771489875276 UpdateTimeMs: 1771489875287 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:31:15.293853Z node 32 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [32:7608491080518441302:2948] 2026-02-19T08:31:15.293992Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [32:7608491080518441303:2949], CA [32:7608491080518441304:2950], CA [32:7608491080518441301:2947], CA [32:7608491080518441305:2951], trace_id# 2026-02-19T08:31:15.294100Z node 32 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [32:7608491080518441303:2949], CA [32:7608491080518441304:2950], CA [32:7608491080518441301:2947], CA [32:7608491080518441305:2951], trace_id# 2026-02-19T08:31:15.296182Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [32:7608491080518441301:2947] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 5542 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 1633 FinishTimeMs: 1771489875294 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" ReadRows: 131 ReadBytes: 1048 AffectedPartitions: 13 } IngressRows: 131 ComputeCpuTimeUs: 617 BuildCpuTimeUs: 1016 WaitOutputTimeUs: 84 HostName: "ghrun-fkrokuvqke" NodeId: 32 StartTimeMs: 1771489875293 CreateTimeMs: 1771489875274 UpdateTimeMs: 1771489875294 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:31:15.296280Z node 32 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [32:7608491080518441301:2947] 2026-02-19T08:31:15.296394Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. 
Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [32:7608491080518441303:2949], CA [32:7608491080518441304:2950], CA [32:7608491080518441305:2951], trace_id# 2026-02-19T08:31:15.296482Z node 32 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [32:7608491080518441303:2949], CA [32:7608491080518441304:2950], CA [32:7608491080518441305:2951], trace_id# 2026-02-19T08:31:15.296706Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [32:7608491080518441304:2950] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1089 Tasks { TaskId: 4 CpuTimeUs: 106 FinishTimeMs: 1771489875294 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" AffectedPartitions: 12 } ComputeCpuTimeUs: 53 BuildCpuTimeUs: 53 HostName: "ghrun-fkrokuvqke" NodeId: 32 StartTimeMs: 1771489875294 CreateTimeMs: 1771489875276 UpdateTimeMs: 1771489875294 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:31:15.296768Z node 32 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [32:7608491080518441304:2950] 2026-02-19T08:31:15.296823Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [32:7608491080518441303:2949], CA [32:7608491080518441305:2951], trace_id# 2026-02-19T08:31:15.296878Z node 32 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [32:7608491080518441303:2949], CA [32:7608491080518441305:2951], trace_id# 2026-02-19T08:31:15.297069Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [32:7608491080518441303:2949] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 614 DurationUs: 1000 Tasks { TaskId: 3 CpuTimeUs: 102 FinishTimeMs: 1771489875295 OutputBytes: 1 Tables { TablePath: "/Root/ManyShardsTable" AffectedPartitions: 12 } ComputeCpuTimeUs: 46 BuildCpuTimeUs: 56 HostName: "ghrun-fkrokuvqke" NodeId: 32 StartTimeMs: 1771489875294 CreateTimeMs: 1771489875276 UpdateTimeMs: 1771489875295 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:31:15.297120Z node 32 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [32:7608491080518441303:2949] 2026-02-19T08:31:15.297187Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:885} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [32:7608491080518441305:2951], trace_id# 2026-02-19T08:31:15.297246Z node 32 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:170} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [32:7608491080518441305:2951], trace_id# 2026-02-19T08:31:15.297441Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:575} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [32:7608491080518441305:2951] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 4761 DurationUs: 1000 Tasks { TaskId: 5 StageId: 1 CpuTimeUs: 2467 FinishTimeMs: 1771489875295 InputBytes: 4 OutputRows: 1 OutputBytes: 4 ResultRows: 1 ResultBytes: 4 ComputeCpuTimeUs: 247 BuildCpuTimeUs: 2220 HostName: "ghrun-fkrokuvqke" NodeId: 32 StartTimeMs: 1771489875294 CreateTimeMs: 1771489875277 UpdateTimeMs: 1771489875295 } MaxMemoryUsage: 1048576 } trace_id# 2026-02-19T08:31:15.297487Z node 32 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [32:7608491080518441305:2951] 2026-02-19T08:31:15.297688Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1512} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-02-19T08:31:15.297790Z node 32 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1531} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. 
Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-02-19T08:31:15.297855Z node 32 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1134} ActorId: [32:7608491080518441294:2575] TxId: 281474976710774. Ctx: { TraceId: 01khtgcm7wafmjq6sc52mgv022, Database: /Root, SessionId: ydb://session/3?node_id=32&id=ZmQ2MjcxNWMtY2IxYThhNGUtMTUwODNlMDctZTA1OTE5MjA=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.013293s ReadRows: 200 ReadBytes: 1600 RequestUnits# 200 ForceFlag# true trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningDeleteUpdate-UseSink [GOOD] Test command err: 2026-02-19T08:30:22.105302Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490853481311522:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:22.105537Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:22.126464Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:30:22.368663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:22.368793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:22.386350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:22.418510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:22.484945Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490853481311412:2081] 1771489822097069 != 1771489822097072 2026-02-19T08:30:22.489781Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:22.592642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:22.592664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:22.592672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:22.592810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:22.687915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:23.017639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:23.032320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:23.084053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.113496Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:23.286717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.433489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:23.515302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.234946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490866366215194:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:25.235034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:25.235361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490866366215204:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:25.235431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:25.600411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.634045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.670271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.701345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.733161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.777744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.831134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:25.907601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.008572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490870661183369:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.008649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.008768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490870661183374:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.008913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490870661183376:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.009127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.012991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:26.024035Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490870661183378:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:26.113150Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490870661183429:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:27.105797Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490853481311522:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:27.105858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... 1:01.677525Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:01.766576Z node 6 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:01.767770Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7608491021278710568:2081] 1771489861654337 != 1771489861654340 2026-02-19T08:31:01.785340Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:01.785423Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:01.812315Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:01.876385Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:01.876412Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:01.876431Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:01.876528Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:01.902178Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:02.533457Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:02.662094Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:05.824580Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608491038458580635:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.824618Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608491038458580648:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.824684Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.826845Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7608491038458580658:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.826938Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:05.828567Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:05.839606Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7608491038458580657:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:31:05.941592Z node 6 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [6:7608491038458580710:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:05.985685Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:06.656070Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7608491021278710611:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:06.656153Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:09.841411Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608491054635000442:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:09.841481Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:09.856133Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:09.948815Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:09.954033Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608491054635000404:2081] 1771489869840369 != 1771489869840372 2026-02-19T08:31:09.978863Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:09.978964Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:09.995557Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:10.075061Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:10.075089Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:10.075098Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:10.075169Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:10.153989Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:10.694349Z node 7 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:10.849838Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:14.295127Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491076109837772:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.295177Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491076109837762:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.295235Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.296329Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491076109837792:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.296360Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.300666Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:14.315088Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608491076109837791:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:31:14.377603Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608491076109837844:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:14.409231Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.843084Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608491054635000442:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:14.843170Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table >> KqpFlowControl::FlowControl_BigLimit [GOOD] >> KqpFlowControl::FlowControl_SmallLimit >> KqpScan::UnionMixed >> KqpRanges::DuplicateCompositeKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan+UseSink >> KqpRanges::ValidatePredicatesDataQuery [GOOD] >> KqpReturning::ReplaceSerial >> KqpScan::TaggedScalar >> KqpPointConsolidation::ReadRanges [GOOD] >> KqpScan::AggregateByColumn >> KqpSplit::AfterResult+Unspecified [GOOD] >> KqpSplit::AfterResultMultiRange+Ascending >> KqpScan::FullFrameWindow >> KqpScan::Join3 >> KqpScan::CrossJoinOneColumn [GOOD] >> KqpScan::CrossJoinCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::TopSortOverSecondaryIndexRead [GOOD] Test command err: 2026-02-19T08:31:03.792551Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491028231962866:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:03.792700Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:04.080263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:04.080398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:04.097755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:04.146567Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:04.152767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:04.278705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:04.278723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-02-19T08:31:04.278730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:04.278813Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:04.311462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:04.661174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:04.720550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.796568Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:04.866705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.015089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.096577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:06.993103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491041116866501:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.993264Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.997240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491041116866511:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:06.997308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.304112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.352831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.389261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.422685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.456055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.495186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.537465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.618445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.696883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491045411834675:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.696992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.697281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491045411834680:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.697328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491045411834681:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.697448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.701404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:07.715441Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491045411834684:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:07.774085Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491045411834735:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:08.791682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491028231962866:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:08.791778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:10.516859Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489870514, txId: 281474976715673] shutting down 2026-02-19T08:31:11.453457Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491064272968157:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:11.453847Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization ... st GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.425666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.486596Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.493457Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:14.711365Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077157871878:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.711441Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.715450Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491077157871888:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.715552Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.772248Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.827756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.859168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.892722Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.922323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.002669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.050327Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.134669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.226113Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491081452840065:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.226184Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.226381Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491081452840068:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.226410Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.226698Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491081452840072:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.230621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:15.242426Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491081452840074:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:15.300528Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491081452840125:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:16.453412Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491064272968157:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:16.453483Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:16.886394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.941373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.016741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"E-Size":"0","PlanNodeId":4,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/SecondaryComplexKeys","Columns":["Fk1","Fk2","Key","Value"],"E-Rows":"1","Table":"SecondaryComplexKeys","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["SecondaryComplexKeys\/Index\/indexImplTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"2"},{"Scan":"Parallel","ReadRange":["Fk1 (1)","Fk2 (-∞, +∞)","Key (-∞, +∞)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","E-Rows":"1","Table":"SecondaryComplexKeys\/Index\/indexImplTable","ReadColumns":["Fk1","Key"],"E-Cost":"0"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"TopBy":"row.Fk1","Name":"Top","Limit":"2"}],"Node Type":"Top"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":4}],"Name":"TopSort","Limit":"2","TopSortBy":"row.Fk1"}],"Node Type":"TopSort"}],"Node Type":"Merge","SortColumns":["Fk1 (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryComplexKeys","reads":[{"lookup_by":["Key"],"columns":["Fk1","Fk2","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","reads":[{"lookup_by":["Fk1 (1)"],"columns":["Fk1","Key"],"scan_by":["Fk2 (-∞, +∞)","Key (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"1","Columns":["Fk1","Fk2","Key","Value"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"SecondaryComplexKeys","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"Name":"TopSort","Limit":"2","TopSortBy":"row.Fk1"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpScan::MultipleResults >> KqpScan::PureExpr [GOOD] >> KqpScan::RestrictSqlV0 >> KqpSplit::ChoosePartition+Unspecified >> KqpScan::StreamLookupByPkPrefix >> KqpScan::AggregateCountStar [GOOD] >> KqpScan::AggregateEmptyCountStar >> KqpScan::SelfJoin3xSameLabels >> KqpSplit::AfterResultMultiRange+Descending >> KqpRanges::IsNotNullInValue [GOOD] >> KqpRanges::IsNotNullInJsonValue >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query [GOOD] >> KqpSplit::StreamLookupJoinRetryAttemptForFinishedRead [GOOD] >> KqpScan::DqSourceFullScan [GOOD] >> KqpScan::DqSourceLiteralRange >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] >> KqpNotNullColumns::OptionalParametersDataQuery >> KqpScan::TopSort >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupJoinRetryAttemptForFinishedRead [GOOD] Test command err: 2026-02-19T08:31:08.930132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491049783158281:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:08.930195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:08.989082Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:09.241820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:09.243487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:09.243632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:09.247413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-02-19T08:31:09.371035Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:09.380127Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491049783158061:2081] 1771489868911628 != 1771489868911631 2026-02-19T08:31:09.420449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:09.420474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:09.420481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:09.420590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:09.537807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:09.812011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:09.860172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.926372Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:10.004721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.183244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.253659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.176789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491066963029119:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.176904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.177234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491066963029128:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.177312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.546995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.581609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.613136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.640438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.673913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.715671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.747511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.808588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.885092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491066963029995:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.885164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.885407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491066963030000:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.885452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491066963030001:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.885486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.889077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:12.904045Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491066963030004:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:13.005220Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491066963030057:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:13.930378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491049783158281:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:13.930447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:14.459738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ty ... :15.865028Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:15.887038Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:15.961007Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:15.964328Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491078960875979:2081] 1771489875861102 != 1771489875861105 2026-02-19T08:31:15.996800Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:15.996879Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:16.002905Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:16.077969Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:16.077991Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:16.077998Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:16.078074Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:16.175586Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:16.484553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:16.489533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:16.498015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.581219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.717380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.780512Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.907676Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:19.011432Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491096140747034:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.011506Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.011685Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491096140747044:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.011713Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.094509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.127491Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.159169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.186962Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.218341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.283947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.319013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.365381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.448341Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491096140747912:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.448451Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.448545Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491096140747917:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.448803Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491096140747919:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.448861Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.452158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:19.463786Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491096140747920:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:19.536627Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491096140747972:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:20.865814Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491078960876008:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:20.865881Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:21.258705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) captured evread ----------------------------------------------------------- >> KqpScan::BoolFlag [GOOD] >> KqpScan::Counters >> KqpScan::StreamExecuteScanQueryClientTimeoutBruteForce [GOOD] >> KqpScan::StreamLookup >> KqpSplit::StreamLookupJoinSplitAfterFirstResult [GOOD] >> KqpNewEngine::SqlInAsScalar [GOOD] >> KqpNewEngine::SequentialReadsPragma+Enabled >> KqpScan::NullInKeySuffix [GOOD] >> KqpScan::NoTruncate >> KqpScan::DecimalColumn [GOOD] >> KqpScan::CustomWindow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupJoinSplitAfterFirstResult [GOOD] Test command err: 2026-02-19T08:31:11.342944Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491063615044323:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:11.344018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:11.578117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:11.624770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:11.624910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:11.667959Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491063615044266:2081] 1771489871331959 != 1771489871331962 2026-02-19T08:31:11.681934Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:11.688434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:11.810073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:11.810092Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:11.810097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:11.810168Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:11.871974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:12.273087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:12.336244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.345886Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:12.463914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.618831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.704741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.480318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491076499948040:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.480388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.480623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491076499948049:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.480657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.786193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.819670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.849569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.880744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.910840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.943336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.985440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.038423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.117887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491080794916212:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.117989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.121331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491080794916217:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.121398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491080794916218:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.121473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.126143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:15.141485Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491080794916221:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:15.225468Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491080794916272:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:16.340097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491063615044323:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:16.340157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:16.820888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) captured evread -------------- ... tadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:18.255494Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:18.332892Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:18.336700Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491091861955070:2081] 1771489878202291 != 1771489878202294 2026-02-19T08:31:18.371547Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:18.371650Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:18.374451Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:18.430275Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:18.430300Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:18.430309Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:18.430391Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:18.477534Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:18.837904Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:31:18.860137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.928765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.063320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.129213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.274362Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:21.302787Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491104746858841:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.302866Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.303113Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491104746858851:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.303144Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.359094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.391969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.473435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.513326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.546989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.596520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.640104Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.694978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.789214Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491104746859720:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.789317Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.789864Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491104746859725:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.789929Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491104746859726:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.790036Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.798264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:21.809420Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491104746859729:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:21.877299Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491104746859780:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:23.213839Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491091861955191:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:23.213914Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:23.469677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- >> KqpSort::Offset [GOOD] >> KqpScan::Offset >> KqpScan::Join4 [GOOD] >> KqpScan::JoinLeftOnly >> KqpNewEngine::DqSourceLimit [GOOD] >> KqpNewEngine::DqSourceSequentialLimit >> KqpScan::IsNullPartial [GOOD] >> KqpScan::Join >> KqpScan::TaggedScalar [GOOD] >> KqpScan::StreamLookupFailedRead >> KqpScan::UnionMixed [GOOD] >> KqpScan::UnionSameTable >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc >> KqpScan::EmptySet_1 [GOOD] >> KqpExtractPredicateLookup::SimpleRange [GOOD] >> KqpExtractPredicateLookup::ComplexRange >> KqpSort::OffsetTopSort [GOOD] >> KqpScan::AggregateByColumn [GOOD] >> KqpScan::MultipleResults [GOOD] >> KqpScan::MiltiExprWithPure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::Offset [GOOD] Test command err: 2026-02-19T08:30:23.161183Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490857720902340:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:23.162025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:23.373345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:23.389692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:23.389809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:23.448642Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:23.485570Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:23.490628Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490857720902294:2081] 1771489823151719 != 1771489823151722 2026-02-19T08:30:23.558352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:23.558384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:23.558393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:23.558485Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:23.598612Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:23.932981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:24.001483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:24.136339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:24.237305Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:24.281644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:24.339300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.224027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490870605806063:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.224155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.224644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490870605806073:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.224699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.611443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.640243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.674462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.702898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.729004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.788532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.816587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.860672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:26.935021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490870605806945:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.935091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.935233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490870605806950:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.935285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490870605806951:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.935402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:26.939484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:26.949245Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490870605806954:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:27.024674Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490874900774301:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:28.155928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490857720902340:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:28.155994Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:28.444971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:28.481134Z no ... 146108Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7608491046144047533:2202];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:12.146178Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:17.059919Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608491090857242209:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:17.059973Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:17.081804Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:17.168502Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:17.171509Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608491090857242178:2081] 1771489877058656 != 1771489877058659 2026-02-19T08:31:17.191581Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:17.191700Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:17.218369Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:17.286439Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:17.286464Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:17.286474Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-02-19T08:31:17.286578Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:17.381095Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:17.879845Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:17.897766Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.000165Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.130147Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:18.205344Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.301302Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.915287Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491108037113241:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.915398Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.915955Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491108037113251:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:21.916023Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.006288Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.060959Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.061843Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608491090857242209:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:22.061928Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:22.111935Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.157521Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.213420Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.269754Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.324751Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.389031Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.534106Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491112332081416:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.534240Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.534584Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491112332081421:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.534635Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491112332081422:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.534770Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.540227Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:22.558417Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608491112332081425:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:22.616567Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608491112332081476:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNewEngine::JoinMultiConsumer [GOOD] >> KqpNewEngine::JoinDictWithPure >> KqpScan::RestrictSqlV0 [GOOD] >> KqpFlowControl::FlowControl_SmallLimit [GOOD] >> KqpSplit::AfterResultMultiRange+Ascending [GOOD] >> KqpScan::FullFrameWindow [GOOD] >> KqpScan::EmptySet_2 >> KqpSplit::IntersectionLosesRange+Descending >> KqpScan::AggregateNoColumn [GOOD] >> KqpScan::AggregateNoColumnNoRemaps >> KqpRanges::DeleteNotFullScan+UseSink [GOOD] >> KqpRanges::DeleteNotFullScan-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::EmptySet_1 [GOOD] Test command err: 2026-02-19T08:30:52.152705Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490981187792569:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:52.152779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:52.357760Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:52.382509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:52.382642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:52.413825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:52.456605Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490981187792530:2081] 1771489852151304 != 1771489852151307 2026-02-19T08:30:52.468933Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:52.536419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:52.536443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:52.536449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:52.536553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:52.643765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:52.982834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:53.074717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.160872Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:53.228093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.369466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:53.437956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.255437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490994072696292:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.255569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.255932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490994072696302:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.255967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.561372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.605161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.634594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.662415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.697657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.734682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.796230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.835597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:55.920710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490994072697169:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.920787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.921367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490994072697174:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.921404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490994072697175:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.921511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:55.926371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:55.937435Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490994072697178:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:56.015966Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490998367664525:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:57.153010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490981187792569:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:57.153067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:07.336213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:31:07.336261Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:12.885779Z node 1 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [1:7608491067087145188:2093], tablet: [1:7608490985482760855:2318] ... xecution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029 2026-02-19T08:31:20.479446Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491102324831425:2250];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:20.479596Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:20.492548Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:20.661881Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491102324831199:2081] 1771489880427837 != 1771489880427840 2026-02-19T08:31:20.669157Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:20.669249Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:20.674798Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:20.690204Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:20.809950Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:20.858466Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:20.858489Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:20.858497Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:20.858576Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:21.435122Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:21.502425Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:21.504301Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:21.517153Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.599072Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.780129Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.858189Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.217998Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491119504702257:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.218083Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.218362Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491119504702267:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.218403Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.309876Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.398248Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.448312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.499215Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.541951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.590345Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.633502Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.683919Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.801922Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491119504703140:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.802005Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.809853Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491119504703146:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.809854Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491119504703145:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.809969Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.818230Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:24.832526Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491119504703149:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:24.908903Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491119504703200:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:25.476500Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491102324831425:2250];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:25.476593Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSplit::ChoosePartition+Unspecified [GOOD] >> KqpSplit::IntersectionLosesRange+Ascending >> KqpScan::AggregateEmptyCountStar [GOOD] >> KqpScan::AggregateEmptySum >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateByColumn [GOOD] Test command err: 2026-02-19T08:31:04.754546Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491034329150837:2097];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:04.754818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:05.001156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:05.029148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:05.029271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:05.104333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:05.112039Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:05.137842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491034329150768:2081] 1771489864748525 != 1771489864748528 2026-02-19T08:31:05.217600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:05.217622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:05.217628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:05.217733Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:05.255509Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:05.637574Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:05.680839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.770159Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:05.840315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:06.000941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:06.068838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.998610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491047214054535:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.998708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.999003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491047214054545:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.999066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:08.300742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.334268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.362806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.395424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.427035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.476748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.511109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.572633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:08.640624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491051509022707:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:08.640701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:08.641022Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491051509022712:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:08.641059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491051509022713:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:08.641181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:08.644697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:08.655445Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491051509022716:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:08.746309Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491051509022767:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:09.752253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491034329150837:2097];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:09.752317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:10.291674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.458229Z no ... cpp:689) 2026-02-19T08:31:21.294861Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491107356190205:2182];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:21.294974Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:21.343168Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:21.428356Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:21.428451Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:21.442139Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491107356190052:2081] 1771489881285686 != 1771489881285689 2026-02-19T08:31:21.459915Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:21.471102Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:21.550350Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:21.550374Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:21.550384Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:21.550483Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:21.575966Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:31:22.062875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:22.070676Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:22.084212Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.173531Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.308494Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:22.364443Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.438959Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.872714Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491120241093808:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.872823Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.873215Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491120241093818:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.873258Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.942597Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.997103Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.038336Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.077951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.152370Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.200470Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.237220Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.290217Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.400880Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491124536061989:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.401016Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.401817Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491124536061995:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.401853Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491124536061994:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.401883Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.406515Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:25.420330Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491124536061998:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:25.480527Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491124536062049:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:26.295248Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491107356190205:2182];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:26.295326Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:27.506784Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489887489, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::OffsetTopSort [GOOD] Test command err: 2026-02-19T08:30:33.827228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490898185067406:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:33.827288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:34.139256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:34.139355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:34.172214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:34.178804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:34.211520Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:34.330421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:34.330440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:34.330446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:34.330537Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:34.407204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:34.826406Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:34.829890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:34.835869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:30:34.904861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.049014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.198813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.272922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:36.881644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490911069971041:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.881773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.882087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490911069971051:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:36.882135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:37.235113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.269395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.301038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.372034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.414527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.465472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.506333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.578474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:37.660809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490915364939223:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:37.660880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:37.660951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490915364939228:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:37.660983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490915364939230:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:37.661032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:37.664513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:37.673906Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490915364939232:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:30:37.752026Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490915364939283:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:38.820567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490898185067406:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:38.820626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:40.493928Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490932074918171:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:40.495417Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error= ... 2515]; 2026-02-19T08:31:15.152280Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:18.851874Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7608491092838587611:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:18.853950Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:18.869832Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:18.958869Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:18.960804Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7608491092838587564:2081] 1771489878849612 != 1771489878849615 2026-02-19T08:31:19.000939Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:19.001045Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:19.007904Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:19.079272Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:19.079295Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:19.079302Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:19.079359Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:19.099882Z node 7 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:19.721076Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:19.738358Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:19.747926Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.836954Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.950716Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:20.020700Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.108253Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.419259Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491114313425918:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.419387Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.419790Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491114313425928:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.419865Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.527882Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.600544Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.650197Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.701832Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.754777Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.808833Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.856399Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608491092838587611:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:23.857297Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:23.867251Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.921083Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.024555Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491118608394100:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.024648Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.024803Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491118608394105:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.024905Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491118608394107:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.024967Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.029058Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:24.043181Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608491118608394109:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:24.114620Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608491118608394160:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> ColumnShardTiers::DSConfigs [GOOD] >> KqpScan::StreamLookupByPkPrefix [GOOD] >> KqpScan::StreamLookupByFullPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpFlowControl::FlowControl_SmallLimit [GOOD] Test command err: 2026-02-19T08:31:06.314556Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491039741055220:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:06.315520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:06.552421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:06.552524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:06.594016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:06.612611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:06.639618Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:06.641875Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491039741055171:2081] 1771489866309996 != 1771489866309999 2026-02-19T08:31:06.842279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:06.842301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:06.842309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:06.842386Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:06.849816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:07.295787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:07.314265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:31:07.321917Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:07.363257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.498518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.654702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.721801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.576848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491052625958937:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.577026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.577364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491052625958947:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.577410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:09.888660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.919104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.945351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.969400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.994061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.027597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.062516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.131116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.203375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491056920927113:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.203463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.203991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491056920927118:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.204028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491056920927119:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.204146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:10.208555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:10.220155Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491056920927122:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:10.286897Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491056920927173:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:11.311835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491039741055220:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:11.311920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:11.714965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... ath=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:20.872451Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:20.873842Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:20.874742Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:20.874811Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:20.875048Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491103722474413:2081] 1771489880690090 != 1771489880690093 2026-02-19T08:31:20.905560Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:21.012824Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:21.012844Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:21.012859Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:21.012937Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:21.137952Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:21.670999Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:21.682477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:31:21.704843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.748842Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:21.811532Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.026961Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.101752Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.446802Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491120902345470:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.446885Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.447294Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491120902345480:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.447344Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.511214Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.549898Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.587315Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.633336Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.670192Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.744948Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.820763Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.869702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.959074Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491120902346361:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.959166Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.959610Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491120902346366:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.959667Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491120902346367:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.959758Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.962788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:24.974239Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491120902346370:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:25.055246Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491125197313717:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:25.693021Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491103722474454:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:25.693093Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:26.719707Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.570663Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489887496, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::RestrictSqlV0 [GOOD] Test command err: 2026-02-19T08:31:08.970748Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491051084523137:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:08.973448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:09.181835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:09.206428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:09.206517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:09.266527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:09.317483Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491051084523096:2081] 1771489868963260 != 1771489868963263 2026-02-19T08:31:09.324468Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:09.381895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:09.381915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:09.381924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:09.382018Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:31:09.402328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:09.755730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:09.761890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:09.808418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.940307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.048403Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:10.098060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.168972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.092070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491068264394152:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.092166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.092437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491068264394161:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.092481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.408807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.447985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.477623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.504494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.535371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.571708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.636322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.677895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.761668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491068264395033:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.761765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.762054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491068264395038:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.762064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491068264395039:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.762120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.765518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:12.776501Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491068264395042:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:12.868116Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491068264395093:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:13.966181Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491051084523137:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:13.966238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:14.476315Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489874504, txId: 281474976715673] shutting down 2026-02-1 ... _PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:22.168783Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:22.231053Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:22.232215Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491112331817240:2081] 1771489882135607 != 1771489882135610 2026-02-19T08:31:22.272413Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:22.272499Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:22.275920Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:22.384350Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:22.402139Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:22.402157Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:22.402163Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:22.402214Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:22.930016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:22.943635Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.027462Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.151563Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:23.188698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.259356Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.761601Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491125216720994:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.761745Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.762088Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491125216721004:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.762151Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.836821Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.875365Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.929920Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.969218Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.997128Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.030877Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.064820Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.108546Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.185951Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491129511689169:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.186041Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.186297Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491129511689175:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.186321Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491129511689174:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.186360Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.189877Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:26.200482Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491129511689178:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:26.268195Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491129511689229:3760] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:27.141840Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491112331817371:2160];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:27.141902Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:27.808937Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608491133806656826:2540], status: GENERIC_ERROR, issues:
: Error: V0 syntax is disabled 2026-02-19T08:31:27.810062Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=NWUyZmQxNWItZjg1NDM2NzMtOTE4NWRhNzItOWMzNmM4NmE=, ActorId: [3:7608491133806656819:2536], ActorState: ExecuteState, LegacyTraceId: 01khtgd1hb6dc2n8909zvq1fxz, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "V0 syntax is disabled" severity: 1 } tx_id# trace_id#
: Error: V0 syntax is disabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRange+Ascending [GOOD] Test command err: 2026-02-19T08:31:14.322288Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491078186113425:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:14.323257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:14.336791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:14.593330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:14.593472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:14.638191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:14.663822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:14.709801Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491078186113368:2081] 1771489874306453 != 1771489874306456 2026-02-19T08:31:14.716148Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:14.794064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:14.794083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:14.794092Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:14.794172Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:14.808575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:15.229133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:15.238356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:15.298794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.343335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:15.413317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.552679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.626757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.434189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491091071017133:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:17.434318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:17.434702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491091071017143:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:17.434778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:17.735950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.769901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.805891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.836150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.870990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.920013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.967276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.042134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.145633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491095365985318:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.145730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.146055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491095365985323:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.146142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491095365985324:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.146199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.150023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:18.161384Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491095365985327:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:18.221775Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491095365985378:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:19.317832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491078186113425:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:19.317906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... /migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:21.559506Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:21.559869Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:21.561642Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491106587784569:2081] 1771489881388518 != 1771489881388521 2026-02-19T08:31:21.572681Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:21.572764Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:21.577531Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:21.722328Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:21.722356Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:21.722369Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:21.722438Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:21.739603Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:22.210820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:22.218149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:22.240134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.310335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.456362Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.497315Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:22.527211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.722903Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491119472688327:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.722991Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.723339Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491119472688337:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.723396Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.795310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.836542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.869866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.908167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.948224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.001118Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.038652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.088909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.182848Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491123767656503:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.182940Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.183231Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491123767656508:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.183284Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491123767656509:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.183371Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.187554Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:25.200547Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491123767656512:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:25.271718Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491123767656563:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:26.394173Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491106587784710:2170];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:26.394246Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2026-02-19T08:31:27.819874Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489887356, txId: 281474976715673] shutting down >> KqpSplit::AfterResultMultiRange+Descending [GOOD] >> KqpSplit::AfterResultMultiRange+Unspecified >> KqpScan::SelfJoin3xSameLabels [GOOD] >> KqpScan::SelfJoin3x >> KqpScan::Join3 [GOOD] >> KqpScan::Join3TablesNoRemap >> KqpScan::TopSort [GOOD] >> KqpScan::TooManyComputeActors >> KqpScan::CrossJoinCount [GOOD] >> KqpScan::SingleKey >> KqpScan::DqSourceLiteralRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:29:31.025679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:29:31.102667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:29:31.120363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:29:31.120446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:29:31.120952Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:29:31.384863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:31.384969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:31.442391Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489768965770 != 1771489768965774 2026-02-19T08:29:31.449254Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:31.491669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:31.575641Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:29:32.005369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:32.005416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:32.005445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:32.007673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:32.008377Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:32.022606Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-02-19T08:29:43.726541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:957:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.726662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:968:2828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.726754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.727755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:973:2833], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.727863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.732659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:43.752155Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:971:2831], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-02-19T08:29:43.800148Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1024:2865] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:44.010269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.826729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:45.257004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:46.011337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:46.640545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:47.114815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:48.267123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.576817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` 
(TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2026-02-19T08:29:53.704351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:29:53.704428Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-02-19T08:30:04.234960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-02-19T08:30:05.370084Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:227;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2026-02-19T08:30:05.370171Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:30:05.370226Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2026-02-19T08:30:05.370295Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:140;e ... 
ading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:16.438054Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:16.438219Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-02-19T08:31:16.438251Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-02-19T08:31:16.438278Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2026-02-19T08:31:16.438306Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2026-02-19T08:31:16.438345Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2026-02-19T08:31:16.438374Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:16.438409Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:16.438588Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3109:4467];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-02-19T08:31:16.438685Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3113:4469];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-02-19T08:31:16.438759Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3117:4472];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2026-02-19T08:31:27.589932Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.590032Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.590075Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.590215Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.590363Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.590512Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.590573Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2026-02-19T08:31:27.590637Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 
2026-02-19T08:31:27.590682Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.590752Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.590805Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.590840Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2026-02-19T08:31:27.590872Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.590899Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.590944Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.590984Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.591014Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2026-02-19T08:31:27.591046Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.591074Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.591116Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.591173Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.591406Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.591445Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-02-19T08:31:27.591482Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.591527Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.591706Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.591743Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-02-19T08:31:27.591777Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.591809Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.591848Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.592251Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3109:4467];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-02-19T08:31:27.592369Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3113:4469];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-02-19T08:31:27.592450Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3117:4472];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-02-19T08:31:27.592511Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.592545Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-02-19T08:31:27.592581Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.592610Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.592651Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending [GOOD] >> KqpNamedExpressions::NamedExpressionRandom-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:29:30.627507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:29:30.728620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:29:30.746863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:29:30.746939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:29:30.747395Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:29:30.943289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:30.943413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:30.994386Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489768854006 != 1771489768854010 2026-02-19T08:29:31.000533Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:31.041277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:31.130006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:29:31.520014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:31.520057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:31.520090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:31.521795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:31.522275Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:31.535500Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-02-19T08:29:43.426239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.426439Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.427035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:986:2831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.427122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.429648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:43.560059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1078:2904], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.560150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.560401Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1082:2908], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.560457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.560502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2911], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:43.563331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:43.680162Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1087:2913], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:29:43.962964Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1178:2977] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:44.595810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:45.046785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:45.693239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:46.352629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:46.900687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:29:48.013267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:48.302762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2026-02-19T08:29:52.479240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:29:52.479328Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = 
`secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-02-19T08:30:04.332002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", ... line=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:16.779681Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-02-19T08:31:16.779888Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-02-19T08:31:16.779923Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-02-19T08:31:16.779957Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2026-02-19T08:31:16.779987Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2026-02-19T08:31:16.780031Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2026-02-19T08:31:16.780064Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:16.780106Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:16.780301Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3083:4445];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-02-19T08:31:16.780411Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3088:4447];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-02-19T08:31:16.780521Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3098:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2026-02-19T08:31:27.856820Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.856910Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.856952Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.857000Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.857139Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.857341Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.857414Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2026-02-19T08:31:27.857473Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.857524Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.857591Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.857649Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.857683Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2026-02-19T08:31:27.857769Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.857804Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.857851Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.857915Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.857958Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2026-02-19T08:31:27.857992Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.858031Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.858074Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.858123Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.858153Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-02-19T08:31:27.858186Z node 1 
:TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.858218Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.858258Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.858318Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.858347Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-02-19T08:31:27.858379Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.858419Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.858516Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-02-19T08:31:27.858663Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-02-19T08:31:27.858696Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-02-19T08:31:27.858730Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.858763Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-02-19T08:31:27.858802Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-02-19T08:31:27.858975Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3083:4445];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-02-19T08:31:27.859074Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3088:4447];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-02-19T08:31:27.859181Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3098:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' 
stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 >> KqpReturning::ReplaceSerial [GOOD] >> KqpReturning::ReturningColumnsOrder >> KqpScan::Counters [GOOD] >> KqpScan::UnionBasic >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::CrossJoinCount [GOOD] Test command err: 2026-02-19T08:31:00.307547Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491016528297555:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:00.312532Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:00.541596Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:00.567028Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:00.567147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:00.610864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:00.615440Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:00.616627Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491016528297524:2081] 1771489860305265 != 1771489860305268 2026-02-19T08:31:00.737558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:00.737579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:00.737584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:00.737657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:00.807671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:01.136655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:01.200454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.314393Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:01.329732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.491730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:01.555171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:31:03.415499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491029413201286:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.415606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.416043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491029413201296:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.416086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:03.675625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.703936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.736631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.772666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.803271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.845824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.884510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:03.954387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.034298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491033708169464:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.034374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.034569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491033708169469:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.034586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491033708169470:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.034619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:04.037941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:04.049550Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491033708169473:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:04.104889Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491033708169524:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:05.310666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491016528297555:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:05.310738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:05.552272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:10.593009Z no ... snapshot: [step: 1771489877836, txId: 281474976715675] shutting down 2026-02-19T08:31:20.656180Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489879026, txId: 281474976715678] shutting down 2026-02-19T08:31:21.829251Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:21.829385Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:31:21.949885Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491104154315204:2081] 1771489881759029 != 1771489881759032 2026-02-19T08:31:21.962939Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:21.973221Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:21.973322Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:21.978255Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:22.115593Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:22.126254Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:22.126282Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:22.126292Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:22.126375Z node 3 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:22.631057Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:22.640439Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:22.657993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.744786Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.869220Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:22.938060Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.020066Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.866717Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491121334186259:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.866819Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.867281Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491121334186269:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.867334Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.934293Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.965538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.993071Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.024240Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.059029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.095257Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.128025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.180900Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.250229Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491125629154437:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.250322Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.250517Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491125629154442:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.250565Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491125629154443:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.250658Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.254038Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:26.263864Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491125629154446:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:26.331934Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491125629154497:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:28.051313Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:29.929219Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489888875, txId: 281474976710675] shutting down >> TDataShardTrace::TestTraceDistributedUpsert+UseSink >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRangeSegmentPartition+Descending [GOOD] Test command err: 2026-02-19T08:31:16.892613Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491085534403432:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:16.892664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:17.129825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:17.134692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:17.134775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:17.225029Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:17.228628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:17.229280Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491085534403213:2081] 1771489876878802 != 1771489876878805 2026-02-19T08:31:17.354166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:17.354188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:17.354195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:17.354291Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:17.377124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:31:17.764640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:17.830819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:17.895330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:17.969075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.108652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.177085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:19.944012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491098419306986:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.944136Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.944550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491098419306996:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:19.944637Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:20.332122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.363955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.396846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.431606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.464011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.503839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.547601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.616895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.697843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491102714275165:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:20.697913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:20.698197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491102714275170:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:20.698226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491102714275171:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:20.698262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:20.702642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:20.714179Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491102714275174:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:20.799429Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491102714275225:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:21.892452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491085534403432:2257];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:21.892500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2026-02-19T08:31:23.516819Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489883002, txId: 281474976710673] shutting down 2026-02-19T08:31:24.403535Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:24.403711Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:31:24.498883Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:24.500389Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491120024101900:2081] 1771489884373989 != 1771489884373992 2026-02-19T08:31:24.528434Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:24.528536Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:24.539814Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:24.755886Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:24.756002Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:24.756009Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:24.756016Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:24.756089Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:25.154963Z 
node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:25.162414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:25.172438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.253590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.400158Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:25.447673Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.574203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.513160Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491132909005667:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.513235Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.513505Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491132909005677:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.513560Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.583682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.622049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.672273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.704042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.733428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.782737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.828724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.882335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.967180Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491132909006545:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.967271Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.967567Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491132909006550:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.967607Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491132909006551:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.967683Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.970977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:27.982774Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491132909006554:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:28.062442Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491137203973901:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2026-02-19T08:31:30.455067Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489890016, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSourceLiteralRange [GOOD] Test command err: 2026-02-19T08:31:08.575156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491049939896612:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:08.576308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:08.796858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:08.818088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:08.818197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:08.855025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:08.878643Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:08.879677Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491049939896577:2081] 1771489868571517 != 1771489868571520 2026-02-19T08:31:08.936758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:08.936783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:08.936791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:08.936940Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:09.037489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:09.330139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:09.391075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.535634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.633523Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:09.680400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:09.739334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.602313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491062824800345:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.602420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.603263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491062824800355:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.603326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:11.947069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.982583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.016904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.046283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.075495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.120094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.184113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.226046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:12.306971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491067119768525:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.307063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.307496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491067119768530:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.307543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491067119768531:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.307696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:12.310903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:12.320608Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491067119768534:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:12.379023Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491067119768585:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:13.573918Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491049939896612:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:13.573971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:16.275651Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491084701885397:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:16.276859Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... ode(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:23.564092Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:23.566849Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491115414883186:2081] 1771489883340438 != 1771489883340441 2026-02-19T08:31:23.579384Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:23.587218Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:23.705260Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:31:23.734434Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:23.734465Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:23.734480Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:23.734582Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:23.744519Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:24.286866Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:24.295324Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-02-19T08:31:24.317422Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.422691Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:24.450425Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.627131Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.734467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.078609Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491132594754241:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.078720Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.079787Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491132594754251:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.079884Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.145764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.182098Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.211103Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.245208Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.273495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.305589Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.345761Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.398456Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.492030Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491132594755122:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.492125Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.492336Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491132594755127:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.492373Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491132594755128:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.492414Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.495901Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:27.510201Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491132594755131:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:27.601140Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491132594755182:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:28.352113Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491115414883416:2265];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:28.352187Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:29.321535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:29.847868Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489889883, txId: 281474976710675] shutting down 2026-02-19T08:31:30.113156Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489890149, txId: 281474976710677] shutting down >> KqpRanges::IsNotNullInJsonValue [GOOD] >> KqpRanges::IsNotNullInJsonValue2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNamedExpressions::NamedExpressionRandom-UseSink [GOOD] Test command err: 2026-02-19T08:29:41.394899Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490677066236538:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:41.394952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:41.583272Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:41.590468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:41.590581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:41.644117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:41.682443Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490677066236495:2081] 1771489781393221 != 1771489781393224 2026-02-19T08:29:41.690931Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:41.748758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:41.748777Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:41.748790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:41.748922Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:41.806721Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:42.156583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:42.212005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.331008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.426357Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:42.472441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:42.527647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.092317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490689951140258:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.092415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.092745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490689951140268:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.092791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.352215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.379762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.408996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.433470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.458497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.489567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.543278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.583425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:44.645740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490689951141139:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.645911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.645947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490689951141144:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.646115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490689951141146:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.646156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:44.649010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:44.658094Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490689951141148:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:44.719079Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490689951141199:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:46.364027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:46.395646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490677066236538:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:46.395794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:29:47.341723Z no ... 900e-60fbb15a8c50"]]] [["32851214-1178-4482-bee6-6c6d39ee6d97"];["bdff9bb4-17f7-4715-b424-bec0b5c089cf"]] 2026-02-19T08:31:19.444352Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7608491097924454094:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:19.444983Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:19.547878Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:19.549790Z node 12 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:19.589657Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:19.590216Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:19.604850Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:19.656869Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:19.656894Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:19.656905Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:19.657008Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:19.791976Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:20.251352Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:20.261073Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:20.277112Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.375094Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.538844Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:20.637917Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.742587Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.445810Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7608491097924454094:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:24.445898Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:25.048432Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608491123694259688:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.048547Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.048822Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608491123694259697:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.048872Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.159000Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.212376Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.260534Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.304799Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.353015Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.411835Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.462685Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.532112Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.705164Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608491123694260581:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.705313Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.705413Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608491123694260586:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.705687Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7608491123694260588:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.705797Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.710300Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:25.725107Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7608491123694260590:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:25.802480Z node 12 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [12:7608491123694260644:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[["3fcfee4e-401c-4fab-817b-958b7f9fb559"]];[["84a936b2-98b7-4c63-8c14-7c39edfe6d87"]]] [[["3fcfee4e-401c-4fab-817b-958b7f9fb559"]];[["84a936b2-98b7-4c63-8c14-7c39edfe6d87"]]] [["277867cf-279c-42b8-b8f0-2163249486c6"];["757ff07d-c76e-42d6-be06-a3a0694636c8"]] >> KqpScan::StreamLookup [GOOD] >> KqpScan::NoTruncate [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Counters [GOOD] Test command err: 2026-02-19T08:31:10.593727Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491058504000754:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:10.595474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:10.805809Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:10.829670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:10.829774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:10.880658Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:10.882675Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491058504000702:2081] 1771489870591348 != 1771489870591351 2026-02-19T08:31:10.894651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:10.994556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:10.994580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:10.994586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:10.994677Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:10.999769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:11.399654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:11.472273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.604329Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:11.677761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.877317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:11.954216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.828057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491071388904471:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.828182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.828852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491071388904481:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:13.828899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.194616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.229581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.263072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.292178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.315881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.348750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.385156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.426004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.499225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491075683872646:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.499286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.499493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491075683872651:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.499513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.499524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491075683872652:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:14.502901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:14.512963Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491075683872655:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:14.583492Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491075683872706:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:15.593628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491058504000754:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:15.593722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:16.900934Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489876926, txId: 281474976710673] shutting down 2026-02-19T08:31:17.900177Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491088876758155:2074];send_to=[0:730719953 ... 9T08:31:23.838050Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489883877, txId: 281474976710675] shutting down 2026-02-19T08:31:24.938756Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491118541823630:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:24.938798Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:25.001077Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:25.078733Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:25.080157Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491118541823600:2081] 1771489884925945 != 1771489884925948 2026-02-19T08:31:25.120262Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:25.120348Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:25.124914Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:25.217203Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:25.234318Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:25.234345Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:25.234353Z node 3 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:25.234444Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:25.814498Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:25.832468Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.908912Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.019539Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:26.056928Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.112339Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.478926Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491135721694655:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.479017Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.479282Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491135721694665:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.479326Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.560157Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.611771Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.654382Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.699896Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.741238Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.797119Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.853550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.916019Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:29.017214Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491140016662829:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.017311Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.017631Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491140016662834:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.017668Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491140016662835:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.017782Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.021552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:29.038463Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491140016662838:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:29.128289Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491140016662889:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:29.938886Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491118541823630:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:29.938992Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:31.206523Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489891241, txId: 281474976715673] shutting down >> KqpScan::CustomWindow [GOOD] >> KqpScan::DqSource >> KqpNotNullColumns::OptionalParametersDataQuery [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery >> KqpScan::UnionSameTable [GOOD] |95.1%| [TA] $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScan::Offset [GOOD] >> KqpScan::Order >> KqpScan::MiltiExprWithPure [GOOD] >> KqpScan::LongStringCombiner >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> ColumnShardTiers::TTLUsage [GOOD] >> KqpAgg::GroupByLimit [GOOD] >> KqpAgg::AggHashShuffle+UseSink |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TSchemeShardViewTest::CreateView >> TSchemeShardViewTest::AsyncDropSameView >> KqpScan::Join [GOOD] >> KqpScan::JoinLeftOnly [GOOD] >> KqpScan::Join2 |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> KqpScan::EmptySet_2 [GOOD] >> KqpScan::EmptySet_3 >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] >> KqpNewEngine::SequentialReadsPragma-Enabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:32.673257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:32.673345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:32.673393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:32.673431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:32.673457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:32.673503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:32.673529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:32.673566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:32.673619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:32.673677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:32.674370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:32.674653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:32.752116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:32.752181Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:32.763854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:32.764715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:32.764908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:32.777511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:32.782216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:32.782971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:32.783362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:32.786845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:32.787015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:32.788179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-02-19T08:31:32.788241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:32.788356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:32.788403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:32.788445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:32.788611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:32.918781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.919464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.919614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.919689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.919778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.919819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.919851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.919908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.920001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.920061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.920122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.920173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.920228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:32.920277Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... TxId: 102 at schemeshard: 72057594046678944 2026-02-19T08:31:33.994783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:877:2831] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2026-02-19T08:31:33.995309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:129:2153], Recipient [1:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:31:33.995365Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:31:33.995459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:31:33.995545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-02-19T08:31:33.995587Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:31:33.995624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-02-19T08:31:33.995669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-02-19T08:31:33.995750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-02-19T08:31:33.995798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Done -> Done 2026-02-19T08:31:33.999618Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:31:33.999680Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2026-02-19T08:31:33.999817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435072, Sender [1:129:2153], Recipient [1:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-02-19T08:31:33.999856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5315: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-02-19T08:31:33.999927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:31:33.999978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:31:34.000093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: 
TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:31:34.000127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:31:34.000166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:34.000207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:31:34.000243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:34.000291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:31:34.000390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:781:2746] message: TxId: 102 2026-02-19T08:31:34.000459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:34.000504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:31:34.000541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:31:34.000719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-02-19T08:31:34.000765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:31:34.006857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:31:34.006992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:781:2746] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2026-02-19T08:31:34.007206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:31:34.007282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:908:2854] 2026-02-19T08:31:34.007509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877764, Sender [1:910:2856], Recipient [1:129:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:31:34.007576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5397: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-02-19T08:31:34.007615Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6299: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-02-19T08:31:34.008170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:981:2925], Recipient [1:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 
2026-02-19T08:31:34.008235Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:31:34.008361Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:31:34.008648Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy" took 271us result status StatusSuccess 2026-02-19T08:31:34.009245Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy" PathDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableCopy" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:34.009950Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:982:2926], Recipient [1:129:2153]: NKikimrSchemeOp.TDescribePath Path: 
"/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-02-19T08:31:34.010026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:31:34.010142Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:31:34.010356Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex" took 208us result status StatusPathDoesNotExist 2026-02-19T08:31:34.010559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/TableCopy/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/TableCopy\' (id: [OwnerId: 72057594046678944, LocalPathId: 41])" Path: "/MyRoot/TableCopy/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/TableCopy" LastExistedPrefixPathId: 41 LastExistedPrefixDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionSameTable [GOOD] Test command err: 2026-02-19T08:31:20.809573Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491101570390442:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:20.877190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:21.129829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:21.169888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:21.169982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:21.240972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:21.266815Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:21.267870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491101570390178:2081] 1771489880733619 != 1771489880733622 2026-02-19T08:31:21.288087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:21.482166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:21.482184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:21.482194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:21.482264Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:21.795141Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:22.021322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:22.034715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:22.140486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.324903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.505196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.577191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.198143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491118750261246:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.198268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.198581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491118750261256:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.198627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.496701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.535617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.569440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.602825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.636503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.683896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.720384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.782768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.907192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491118750262126:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.907252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.907450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491118750262131:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.907505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491118750262132:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.907676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.911241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:24.920514Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491118750262135:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:25.022997Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491123045229482:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:25.809832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491101570390442:2269];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:25.809892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:26.844140Z node 1 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [1:7608491127340197155:2055], tablet: [1:7608491110160325819:2320], scanId: 2, table: /Root/EightShar ... ing down 2026-02-19T08:31:27.803680Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491130418648553:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:27.820703Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:27.821017Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:27.894864Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:27.897662Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491130418648499:2081] 1771489887796846 != 1771489887796849 2026-02-19T08:31:27.908532Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:27.908606Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:27.931577Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:28.025467Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:28.049733Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:28.049752Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:28.049760Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:28.049830Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:28.484889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:28.491002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:28.501620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.586436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.741768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.809677Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:28.853486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.913831Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491143303552251:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.913929Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.914245Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491143303552261:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.914288Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.984121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.041542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.074971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.108772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.147996Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.195813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.238322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.304987Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.392513Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491147598520429:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.392648Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.393187Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491147598520434:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.393244Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491147598520435:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.393367Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.398823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:31.412046Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491147598520438:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:31.483331Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491147598520489:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:32.798454Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491130418648553:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:32.798534Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:33.071547Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489893110, txId: 281474976710673] shutting down >> TSchemeShardViewTest::DropView >> TSchemeShardViewTest::ReadOnlyMode >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] >> TSchemeShardViewTest::EmptyName >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table [GOOD] >> TSchemeShardViewTest::EmptyQueryText >> KqpSplit::IntersectionLosesRange+Descending [GOOD] >> KqpScan::StreamLookupFailedRead [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> KqpSplit::IntersectionLosesRange+Ascending [GOOD] >> KqpSplit::IntersectionLosesRange+Unspecified >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query >> KqpScan::StreamLookupByFullPk [GOOD] >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> TSchemeShardViewTest::DropView [GOOD] >> KqpStats::StatsProfile >> KqpScan::AggregateEmptySum [GOOD] >> KqpSplit::AfterResultMultiRange+Unspecified [GOOD] >> KqpNewEngine::DqSourceSequentialLimit [GOOD] >> KqpNewEngine::DqSourceLocksEffects >> KqpScan::AggregateNoColumnNoRemaps [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD] >> KqpScan::SelfJoin3x [GOOD] >> KqpTypes::QuerySpecialTypes >> KqpScan::SingleKey [GOOD] >> KqpMergeCn::SortBy_Int32 >> KqpScan::SimpleWindow >> KqpScan::SqlInParameter >> KqpScan::AggregateWithFunction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:32.944434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:32.944530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:32.944618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:32.944667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:32.944706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:32.944748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:32.944780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:32.944828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:32.944891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:32.944967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:32.945871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:32.946168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:33.031966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:33.032035Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:33.047987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:33.050161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:33.050349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:33.063296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:33.064035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:33.064737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:33.065035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:33.068163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:33.068328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:33.069469Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:33.069540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:33.069666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:33.069733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:33.069776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:33.069975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:33.228646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.232193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.232381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.232469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.232607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.232688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.232758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.232949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.233045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.233129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.233207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.233289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.233368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:33.233471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... hildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 45 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:34.965738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:1216:3125], Recipient [1:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-02-19T08:31:34.965806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:31:34.965894Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:31:34.966259Z node 1 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2" took 345us result status StatusSuccess 2026-02-19T08:31:34.967099Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2" PathDescription { Self { Name: "ValueIndex2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex2" LocalPathId: 46 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value2" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { 
GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:34.967780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [1:1217:3126], Recipient [1:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-02-19T08:31:34.967853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:31:34.967980Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:31:34.968204Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2/indexImplTable" took 249us result status StatusSuccess 2026-02-19T08:31:34.968620Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { 
Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 47 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::JoinLeftOnly [GOOD] Test command err: 2026-02-19T08:31:13.202948Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491073050635627:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:13.203839Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:13.453773Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:13.475929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:13.476043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:13.523113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:13.558872Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:13.560296Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491073050635595:2081] 1771489873201168 != 1771489873201171 2026-02-19T08:31:13.617956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:13.617973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:13.617977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:13.618048Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:13.652208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:14.042624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:14.049187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:14.114974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.217932Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:14.244468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.376029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:14.451836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.249722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491085935539362:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.249846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.250156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491085935539372:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.250186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.545319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.578063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.610537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.643535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.680722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.728369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.808443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.853122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.939063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491085935540241:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.939147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.939431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491085935540246:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.939471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491085935540247:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.939570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.943409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:16.964398Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491085935540250:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:17.055175Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491090230507597:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:18.205811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491073050635627:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:18.205875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:18.492158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... eScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:27.413266Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491131791024477:2081] 1771489887291521 != 1771489887291524 2026-02-19T08:31:27.459947Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:27.460046Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:27.462184Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:27.478169Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:27.566930Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:27.566954Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:27.566962Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:27.567047Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:27.696229Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:28.086953Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:28.094502Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:28.116213Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.180881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.315108Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:28.361654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.437866Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.122035Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491148970895537:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.122139Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.122564Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491148970895547:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.122623Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.207379Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.250491Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.287692Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.325692Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.359151Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.392495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.425172Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.481808Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.564982Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491148970896415:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.565068Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.565382Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491148970896420:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.565422Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491148970896421:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.565536Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.574305Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:31.586279Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491148970896424:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:31.658189Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491148970896475:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:32.303134Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491131791024708:2260];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:32.303219Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:33.093431Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.761166Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489893789, txId: 281474976710675] shutting down 2026-02-19T08:31:34.194673Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489894209, txId: 281474976710677] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:35.550548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:35.550647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.550744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:35.550816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:35.550863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:35.550899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:35.550930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-02-19T08:31:35.550961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:35.551060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.551143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:35.552026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:35.552350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:35.640533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:35.640602Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:35.652881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:35.653556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:35.653769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:35.667545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:35.668290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:35.669056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:35.669385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:35.672716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:35.672914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:35.674182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:35.674249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:35.674400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:35.674453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:35.674498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-02-19T08:31:35.674686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:35.823151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.824228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.824367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.824463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.824560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.824632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.824746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.824832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.824932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.825033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.825121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.825191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.825255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.825324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:36.768136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:783:2749] sender: [1:842:2058] recipient: [1:15:2062] 2026-02-19T08:31:36.808403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:31:36.808661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2026-02-19T08:31:36.808766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2026-02-19T08:31:36.808913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:31:36.809002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-02-19T08:31:36.809042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 103:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: 2026-02-19T08:31:36.809098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:31:36.811808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2026-02-19T08:31:36.812014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2026-02-19T08:31:36.812227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.812288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2026-02-19T08:31:36.812337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-02-19T08:31:36.812444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:31:36.814341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly 
msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-02-19T08:31:36.814509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2026-02-19T08:31:36.815268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:36.815403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:36.815456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000039 2026-02-19T08:31:36.815606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 Propose -> Done 2026-02-19T08:31:36.815774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:31:36.815863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 FAKE_COORDINATOR: Erasing txId 103 2026-02-19T08:31:36.817733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:36.817780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:36.817980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:31:36.818076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.818179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:833:2787], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-02-19T08:31:36.818225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:833:2787], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:31:36.818628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.818687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:31:36.818798Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:31:36.818834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:31:36.818903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:31:36.818935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:31:36.818990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:31:36.819047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:31:36.819086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:31:36.819118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:31:36.819187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:31:36.819226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-02-19T08:31:36.819261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:31:36.819306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-02-19T08:31:36.819934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:31:36.820039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:31:36.820086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:31:36.820141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:31:36.820181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:31:36.821056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:31:36.821130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:31:36.821177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:31:36.821212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:31:36.821242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:31:36.821306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:31:36.824499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:31:36.828450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 >> KqpScan::Join3TablesNoRemap [GOOD] >> KqpScan::Join3Tables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:35.893638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:35.893743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.893810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:35.893866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:35.893900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:35.893930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:35.893956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:35.893983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:31:35.894051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.894129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:35.894929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:35.895231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:35.983114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:35.983166Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:35.994003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:35.994935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:35.995116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:36.005748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:36.006537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:36.007242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:36.007550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:36.010540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.010715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:36.011905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:36.011964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.012085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:36.012123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:36.012165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:36.012312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.166260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.167976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.168030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.168088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.168143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:36.653996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:36.654121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:31:36.654244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.654294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:31:36.654326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:31:36.654379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:31:36.655784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.655830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:31:36.655868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:31:36.657246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.657281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.657317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:31:36.657374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:31:36.657477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:31:36.658750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:31:36.658885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:31:36.659143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:36.659250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:36.659291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:31:36.659550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:31:36.659596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:31:36.659723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:31:36.659835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:36.661355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:36.661397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:36.661551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.661588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:31:36.661886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.661926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:31:36.662001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:31:36.662030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:31:36.662077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:31:36.662119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:31:36.662152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:31:36.662184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2026-02-19T08:31:36.662214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:31:36.662249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:31:36.662308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:31:36.662340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:31:36.662366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:31:36.662819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:31:36.662895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:31:36.662927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:31:36.662962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:31:36.662998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:31:36.663061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:31:36.665648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:31:36.666131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:31:36.666720Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:673:2661] Bootstrap 2026-02-19T08:31:36.667743Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:673:2661] Become StateWork (SchemeCache [1:678:2666]) 2026-02-19T08:31:36.669396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:31:36.669523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2026-02-19T08:31:36.669604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, 
viewDescription: Name: "" QueryText: "Some query" 2026-02-19T08:31:36.669681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-02-19T08:31:36.670432Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:673:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:31:36.672877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:36.673100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2026-02-19T08:31:36.673463Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::IntersectionLosesRange+Ascending [GOOD] Test command err: 2026-02-19T08:31:22.314785Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491111951776411:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:22.315262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:22.657803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:22.691288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:22.691385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:22.793097Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491111951776369:2081] 1771489882290025 != 1771489882290028 2026-02-19T08:31:22.815344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:22.817329Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:22.981227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:23.010041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:23.010061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:23.010067Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:23.010144Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:23.325879Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:23.486661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:23.556336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.748464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.909534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.977171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.866433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491124836680132:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.866542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.866875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491124836680142:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.866943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.177232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.212487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.245743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.278548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.309288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.341473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.372581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.424091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.510096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491129131648313:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.510194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.510282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491129131648318:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.510479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491129131648320:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.510512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.514216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:26.526116Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491129131648322:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:26.598541Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491129131648373:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:27.292476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491111951776411:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:27.292529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2026-02-19T08:31:28.669974Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489888245, txId: 281474976715673] shutting down 2026-02-19T08:31:29.723958Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:29.724320Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:31:29.803047Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:29.809847Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491140781866824:2081] 1771489889677808 != 1771489889677811 2026-02-19T08:31:29.835867Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:29.835946Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:29.842336Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:29.897077Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:29.897103Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:29.897110Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:29.897183Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:29.971818Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:30.267270Z 
node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:30.274871Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:30.288843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.353289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.485527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.562513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.707054Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:33.096424Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491157961737876:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.096509Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.096750Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491157961737885:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.096791Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.179837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.210412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.239608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.267342Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.293839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.334352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.408253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.472276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.545594Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491157961738760:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.545690Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.545810Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491157961738765:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.547992Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491157961738767:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.548065Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.549672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:33.562567Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491157961738768:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:33.624152Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491157961738820:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2026-02-19T08:31:35.610534Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489895609, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupFailedRead [GOOD] Test command err: 2026-02-19T08:31:20.933940Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491100210130767:2164];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:20.934000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:20.984353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:21.338414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:21.338540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:21.359541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:21.373004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:21.397830Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491100210130632:2081] 1771489880908777 != 1771489880908780 2026-02-19T08:31:21.405546Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:21.526683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:21.528498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:21.528510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:21.528517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:21.528591Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:21.945850Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:22.045639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:22.052469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:22.135401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.299835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.469174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.573846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.407053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491117390001689:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.407179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.408217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491117390001699:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.408276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.820537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.894889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.938744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.010674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.048088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.084606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.126700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.170866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.258175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491121684969872:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.258255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.258358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491121684969877:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.258515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491121684969879:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.258707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.261980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:25.273347Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491121684969881:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:25.334064Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491121684969932:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:25.933588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491100210130767:2164];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:25.933649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... d : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:31:35.596801Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [2:3456:4594], TxId: 281474976715660, task: 3. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:31:35.596839Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:3456:4594], TxId: 281474976715660, task: 3. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:31:35.597305Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:31:35.597354Z node 2 :KQP_COMPUTE DEBUG: kqp_stream_lookup_actor.cpp:351: StreamLookupActor, inputIndex: 0, CA Id [2:3455:4593]Returned 0 bytes, 0 rows, finished: 0 2026-02-19T08:31:35.597396Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. All outputs have been finished. Consider finished 2026-02-19T08:31:35.597426Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:31:35.597589Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:31:35.597634Z node 2 :KQP_COMPUTE DEBUG: kqp_stream_lookup_actor.cpp:351: StreamLookupActor, inputIndex: 0, CA Id [2:3455:4593]Returned 0 bytes, 0 rows, finished: 0 2026-02-19T08:31:35.597666Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. All outputs have been finished. Consider finished 2026-02-19T08:31:35.597689Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:31:35.597834Z node 2 :KQP_COMPUTE DEBUG: kqp_stream_lookup_actor.cpp:462: StreamLookupActor, inputIndex: 0, CA Id [2:3455:4593]Recv TEvReadResult (stream lookup) from ShardID=72075186224037888, Table = /Root/Table1, ReadId=3 (current ReadId=3), SeqNo=1, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-02-19T08:31:35.597946Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2026-02-19T08:31:35.598013Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:31:35.598066Z node 2 :KQP_COMPUTE DEBUG: kqp_stream_lookup_actor.cpp:351: StreamLookupActor, inputIndex: 0, CA Id [2:3455:4593]Returned 10 bytes, 1 rows, finished: 1 2026-02-19T08:31:35.598156Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . 
DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:31:35.598208Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3456:4594], TxId: 281474976715660, task: 3. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:31:35.598287Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:3456:4594], TxId: 281474976715660, task: 3. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:31:35.598318Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3457:4595], TxId: 281474976715660, task: 4. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:31:35.598391Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [2:3457:4595], TxId: 281474976715660, task: 4. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:31:35.598473Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:31:35.598523Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [2:3455:4593], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-02-19T08:31:35.598907Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715660;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:31:35.599226Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3456:4594], TxId: 281474976715660, task: 3. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:31:35.599278Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [2:3456:4594], TxId: 281474976715660, task: 3. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:31:35.599369Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715660;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:31:35.599477Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:3457:4595], TxId: 281474976715660, task: 4. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:31:35.599502Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [2:3457:4595], TxId: 281474976715660, task: 4. Ctx: { CheckpointId : . TraceId : 01khtgd6we7f33vatwqzy5vs4p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWJjZDhmZGQtOTc5MDhjNmYtYjUyNmM2YmMtNGQ4OTIzMzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-02-19T08:31:35.599587Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715660;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> KqpScan::TooManyComputeActors [GOOD] >> KqpNewEngine::JoinDictWithPure [GOOD] >> KqpNewEngine::IndexAutochooserTopSortDisabled >> KqpScan::UnionBasic [GOOD] >> KqpScan::UnionAggregate >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:29:31.304834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:29:31.383118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:29:31.396244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:29:31.396302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:29:31.396689Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:29:31.620176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:31.620303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:31.674938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489769264169 != 1771489769264173 2026-02-19T08:29:31.684655Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:31.727524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:31.829342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:29:32.236196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:32.236257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:32.236282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:32.238223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:32.238826Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:32.252495Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:32.385141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-02-19T08:29:32.447164Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828672, Sender [1:914:2780], Recipient [1:957:2811]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:29:32.448135Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828673, Sender [1:914:2780], Recipient [1:957:2811]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:29:32.448436Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:29:32.468411Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:29:32.468656Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2026-02-19T08:29:32.474665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:29:32.474879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:29:32.475052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:29:32.475121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:29:32.475189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:29:32.475263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:29:32.475350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:29:32.475420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:29:32.475484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:29:32.475573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:29:32.475657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:29:32.475753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:29:32.475830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:29:32.490474Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828684, Sender [1:914:2780], Recipient [1:957:2811]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:29:32.491170Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828672, Sender [1:915:2781], Recipient [1:959:2813]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:29:32.491678Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828672, Sender [1:918:2783], Recipient [1:964:2817]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:29:32.492794Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 72075186224037888 2026-02-19T08:29:32.492870Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:407: StateInit, received event# 268828673, Sender [1:915:2781], Recipient [1:959:2813]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:29:32.493037Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-02-19T08:29:32.511890Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-02-19T08:29:32.512070Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037889 2026-02-19T08:29:32.516203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:29:32.516299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:29:32.516434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:29:32.516500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:29:32.516564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:29:32.516648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:29:32.516720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:29:32.516784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:29:32.516851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:29:32.516947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];tablet_id=72075186224037889;proces ... 2817]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.230947Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:964:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-02-19T08:31:33.231021Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:969:2820]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.231057Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:969:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP column0/uint64_value: 0 TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5027;count=35;size=14676;count=211;size=1512;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3738;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5027;count=35;size=14746;count=212;size=1512;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3738;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2026-02-19T08:31:33.406625Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:957:2811]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.406687Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-02-19T08:31:33.406753Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:959:2813]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.406777Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-02-19T08:31:33.406826Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:964:2817]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.406852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:964:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-02-19T08:31:33.406901Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:969:2820]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.406922Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:969:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP TEvBlobStorage::TEvPut 
tId=72057594037968897;c=0;:66/0:size=1466;count=9;size=5310;count=36;size=14746;count=212;size=1512;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3738;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5310;count=36;size=14746;count=212;size=1512;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3738;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5310;count=36;size=14816;count=213;size=1512;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3738;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2026-02-19T08:31:33.568283Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:957:2811]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.568363Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:957:2811];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-02-19T08:31:33.568447Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:959:2813]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.568484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:959:2813];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-02-19T08:31:33.568549Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:964:2817]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.568582Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:964:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-02-19T08:31:33.568651Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:426: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:969:2820]: NActors::TEvents::TEvWakeup 2026-02-19T08:31:33.568684Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:969:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP REQUEST=SELECT COUNT(*) FROM `/Root/olapStore/olapTable`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] 
recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:35.180123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:35.180206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.180271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:35.180300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:35.180329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:35.180385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:35.180415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:35.180444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:35.180504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.180565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:35.181256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:35.181527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:35.254153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:35.254212Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:35.266789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:35.267643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:35.267843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:35.280537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:35.281341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear 
TempDirsState with owners number: 0 2026-02-19T08:31:35.282156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:35.282527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:35.285983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:35.286213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:35.287605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:35.287689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:35.287811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:35.287860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:35.287902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:35.288113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:35.423711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers 
} } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.424987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.425036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.425073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.425108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... X_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-02-19T08:31:36.020807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-02-19T08:31:36.021135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-02-19T08:31:36.021469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:36.021576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:36.021630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-02-19T08:31:36.021775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 Propose -> Done 2026-02-19T08:31:36.021942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:31:36.022024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:31:36.023782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-02-19T08:31:36.023837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:36.024033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:31:36.024150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.024206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:31:36.024252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:31:36.024557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.024608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:31:36.024710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:31:36.024747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:31:36.024817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:31:36.024849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:31:36.024888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:31:36.024935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:31:36.024977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:31:36.025008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:31:36.025071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:31:36.025109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:31:36.025141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-02-19T08:31:36.025169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-02-19T08:31:36.025861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:31:36.025990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:31:36.026034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:31:36.026073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-02-19T08:31:36.026124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:31:36.026759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:31:36.026842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:31:36.026888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:31:36.026922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:31:36.026963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:31:36.027047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:31:36.032586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:31:36.032738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:31:36.032944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:31:36.032985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:31:36.033307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:31:36.033374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got 
EvNotifyTxCompletionResult 2026-02-19T08:31:36.033411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:701:2689] TestWaitNotification: OK eventTxId 101 2026-02-19T08:31:36.033836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:31:36.034052Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 225us result status StatusSuccess 2026-02-19T08:31:36.034438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:35.142936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:35.143016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.143095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:35.143125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:35.143163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:35.143192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:35.143214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:35.143236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:35.143289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.143367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:35.144112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:35.144360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:35.217149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:35.217210Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:35.230040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:35.230701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:35.230886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:35.243164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:35.243777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:35.244324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:35.244576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:35.247068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:35.247226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:35.248221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:35.248270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:35.248359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:35.248408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:35.248449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:35.248576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:35.398233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.399309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.399443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.399571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.399719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.399790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.399884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.399988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.400093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.400171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.400270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.400343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.400412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:35.400483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... d [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:31:36.027285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:31:36.029064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:36.029115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:36.029254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:31:36.029388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.029445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:31:36.029495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:31:36.029806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.029851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:31:36.029953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:31:36.029988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:36.030023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:31:36.030066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:36.030102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:31:36.030138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:36.030183Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:31:36.030217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:31:36.030282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:31:36.030322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:31:36.030361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:31:36.030390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:31:36.030988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:36.031094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:36.031144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:31:36.031185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:31:36.031225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:31:36.031674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:36.031746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:36.031771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:31:36.031805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:31:36.031834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:31:36.031924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication 
complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:31:36.032103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:31:36.032140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:31:36.032220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:31:36.035369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:31:36.035595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:31:36.036450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 102 2026-02-19T08:31:36.036751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:31:36.036816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-02-19T08:31:36.036916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:31:36.036939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2026-02-19T08:31:36.037002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:31:36.037023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:31:36.037500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:31:36.037638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:31:36.037687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:731:2719] 2026-02-19T08:31:36.037862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:31:36.037958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:31:36.038014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for 
txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:31:36.038043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:731:2719] 2026-02-19T08:31:36.038091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:31:36.038113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:731:2719] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2026-02-19T08:31:36.038611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:31:36.038798Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 200us result status StatusPathDoesNotExist 2026-02-19T08:31:36.038979Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:35.806201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:35.806280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.806324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:35.806351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:35.806390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:35.806426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:35.806446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:35.806487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:35.806540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.806595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:35.807224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:35.807466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:35.874276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:35.874322Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:35.884895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:35.885512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:35.885695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:35.911089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:35.912216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:35.912893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:35.913193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:35.916689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:35.916884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:35.918123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:35.918188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:35.918300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:35.918357Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:35.918401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:35.918682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.044663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.045995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.046033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.046084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 
... ] Become StateWork (SchemeCache [1:682:2670]) 2026-02-19T08:31:36.564279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:31:36.564486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2026-02-19T08:31:36.564567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2026-02-19T08:31:36.564690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:443: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:31:36.564752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-02-19T08:31:36.564794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 101:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2026-02-19T08:31:36.564854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:31:36.565458Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:677:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:31:36.568582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2026-02-19T08:31:36.568809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2026-02-19T08:31:36.569048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.569102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2026-02-19T08:31:36.569153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-02-19T08:31:36.569255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:31:36.570915Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-02-19T08:31:36.571363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet 
strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-02-19T08:31:36.571491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-02-19T08:31:36.571831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:36.571937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:36.571985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-02-19T08:31:36.572142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 Propose -> Done 2026-02-19T08:31:36.572326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:31:36.572401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:31:36.574166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:36.574216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:36.574382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:31:36.574477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.574528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:31:36.574597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:31:36.574882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.574931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:31:36.575032Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:31:36.575064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:31:36.575103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:31:36.575136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:31:36.575173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:31:36.575229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:31:36.575273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:31:36.575310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:31:36.575368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:31:36.575405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:31:36.575439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-02-19T08:31:36.575467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-02-19T08:31:36.576165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:31:36.576273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:31:36.576321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:31:36.576357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-02-19T08:31:36.576394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:31:36.577051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:31:36.577156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:31:36.577186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:31:36.577221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-02-19T08:31:36.577261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:31:36.577336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:31:36.579835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:31:36.580797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:35.968185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:35.968265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.968326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:35.968362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:35.968392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:35.968425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:35.968463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:35.968493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:35.968578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:35.968651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:35.969429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:35.969727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:36.048902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:36.048963Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:36.061509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:36.062251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:36.062469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:36.073300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:36.073979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:36.074634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:36.074889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:36.077445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.077617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:36.078847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:36.078909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.079012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:36.079051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:36.079090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:36.079285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.216544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.217421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.217543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.217620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.217775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.217835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.217901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.217995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.218099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.218164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.218228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.218281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.218357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:36.218421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:31:36.749347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-02-19T08:31:36.749498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-02-19T08:31:36.749768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:36.749860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:36.749893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_view.cpp:43: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2026-02-19T08:31:36.749998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> Done 2026-02-19T08:31:36.750109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:31:36.750156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:31:36.751640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:36.751684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:36.751858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:31:36.751970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:36.751998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:31:36.752026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:31:36.752276Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:31:36.752329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:31:36.752407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:31:36.752429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:36.752464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:31:36.752502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:36.752528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:31:36.752554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:31:36.752576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:31:36.752601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:31:36.752644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:31:36.752668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:31:36.752690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:31:36.752709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-02-19T08:31:36.753093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:36.753174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:36.753207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:31:36.753233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:31:36.753262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:31:36.753778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:36.753867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:36.753893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:31:36.753936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-02-19T08:31:36.753957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:31:36.754008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:31:36.754227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:31:36.754254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:31:36.754302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:31:36.757103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:31:36.757662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:31:36.757760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:31:36.757981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:31:36.758039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:31:36.758391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:31:36.758468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:31:36.758500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:729:2717] TestWaitNotification: OK eventTxId 102 2026-02-19T08:31:36.758891Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:31:36.759068Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 193us result status StatusPathDoesNotExist 2026-02-19T08:31:36.759231Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpNamedExpressions::NamedExpressionRandomUpsertReturning-UseSink+UseDataQuery [GOOD] >> KqpNewEngine::StaleRO_IndexFollowers+EnableFollowers [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> KqpNamedExpressions::NamedExpressionRandomUpsertRevert-UseSink-UseDataQuery >> KqpNewEngine::StaleRO_IndexFollowers-EnableFollowers >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::NoTruncate [GOOD] Test command err: 2026-02-19T08:31:12.630305Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491067092243258:2173];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:12.630342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:12.680135Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:12.922666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:12.922750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:12.927980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:12.931912Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:12.994328Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:12.996938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491067092243116:2081] 1771489872624084 
!= 1771489872624087 2026-02-19T08:31:13.083347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:13.083589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:13.083601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:13.083608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:13.083700Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:13.542671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:13.552493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:13.621883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.642616Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:13.740743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.907969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:13.996140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.851263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491079977146878:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.851358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.851783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491079977146888:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:15.851827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.191518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.226307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.258766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.290973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.322550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.354080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.385277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.459729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.534658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491084272115051:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.534752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.534961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491084272115056:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.535031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491084272115057:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.535085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:16.538889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:16.550644Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491084272115060:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:16.608110Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491084272115111:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:17.633823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491067092243258:2173];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:17.633889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... ath=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:26.545565Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:26.623571Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491129224763645:2081] 1771489886514338 != 1771489886514341 2026-02-19T08:31:26.630892Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:26.643468Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:26.643569Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:26.646231Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:26.705626Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:26.705654Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:26.705669Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:26.705775Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:26.716299Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:27.161415Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:27.167563Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:27.180834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.264512Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.448523Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.519508Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.522472Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:29.884628Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491142109667406:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.884737Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.885370Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491142109667416:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.885430Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.967830Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.007283Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.048011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.085547Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.119520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.154916Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.232805Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.291363Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.381739Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491146404635587:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.381823Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.382141Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491146404635593:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.382196Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491146404635592:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.382231Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.386022Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:30.397837Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491146404635596:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:30.456969Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491146404635647:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:31.517814Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491129224763686:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:31.517895Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:32.156033Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.469747Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489892501, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookup [GOOD] Test command err: 2026-02-19T08:31:03.898436Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491030715157243:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:03.898532Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:04.148943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:04.149203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:04.191656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:04.216686Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491030715156984:2081] 1771489863838001 != 1771489863838004 2026-02-19T08:31:04.229648Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:04.234778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:04.317572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:04.317589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:04.317594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:04.317689Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:31:04.371776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:04.781781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:04.787752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:04.827045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:04.897378Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:04.975516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.118186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:05.203801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.149151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491047895028049:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.149254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.149611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491047895028059:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.149698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.489590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.530985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.568989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.602062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.642793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.688389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.743686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.789635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:07.868243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491047895028928:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.868328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.868800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491047895028933:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.868846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491047895028934:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.868963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:07.872970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:07.885023Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491047895028937:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:07.976440Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491047895028990:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:08.895139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491030715157243:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:08.895209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:10.526389Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_scan_query.cpp:410: Client lost 2026-02-19T08:31:10.527113Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:7608491056 ... KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489883051, txId: 281474976715706] shutting down 2026-02-19T08:31:23.133778Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_scan_query.cpp:410: Client lost 2026-02-19T08:31:25.363452Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:25.363606Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:31:25.436375Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:25.439931Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491121654970159:2081] 1771489885320208 != 1771489885320211 2026-02-19T08:31:25.450562Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:25.451013Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:25.478359Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:25.561266Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:25.561290Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:25.561299Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:25.561374Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:25.649829Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:26.004179Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:26.013939Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:26.022708Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.082192Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.256620Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.329926Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.456577Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:28.766720Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491134539873917:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.766790Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.767130Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491134539873927:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.767165Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.844516Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.882233Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.921613Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.959800Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.996958Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:29.035764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:29.081950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:29.140610Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:29.236106Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491138834842087:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.236217Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.237465Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491138834842092:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.237542Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491138834842093:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.237730Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:29.243097Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:29.256211Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491138834842096:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:29.353132Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491138834842147:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:31.174251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.095111Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489892130, txId: 281474976710676] shutting down >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs >> KqpScan::Order [GOOD] >> KqpLimits::BigParameter >> KqpExplain::UpdateSecondaryConditional-UseSink-UseStreamIndex >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal >> KqpScan::DqSource [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> KqpQuery::QueryCache >> KqpQuery::YqlSyntaxV0 >> KqpExplain::ExplainStream >> KqpExplain::LimitOffset >> KqpExplain::UpdateConditional+UseSink >> KqpStats::JoinNoStatsYql >> KqpQuery::RewriteIfPresentToMap >> KqpScan::PrunePartitionsByExpr >> KqpScan::LongStringCombiner [GOOD] >> KqpLimits::StreamWrite+Allowed >> KqpScan::Join2 [GOOD] >> KqpScan::EmptySet_3 [GOOD] |95.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] Test command err: 2026-02-19T08:30:41.575135Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490933711707440:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:41.576349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:41.809323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:41.839607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:41.839777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:41.891047Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:41.907220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:42.019042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:42.019061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:42.019076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:42.019149Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:42.026141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:42.457805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:42.463624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:42.513947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.585011Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:42.650029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.801849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.866344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:44.629698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490946596611148:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.629851Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.630106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490946596611158:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.630150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.993685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.026576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.056380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.087923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.123559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.156834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.202052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.253696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.326940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490950891579329:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.327010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.327227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490950891579334:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.327263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490950891579335:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.327381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.330884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:45.340285Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490950891579338:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:45.434605Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490950891579389:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:46.562165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490933711707440:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:46.562225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:46.849022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:47.082439Z node 1 :FLAT_TX_ ... 968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:29.595428Z node 7 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:29.611111Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:29.692932Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:29.810479Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:29.810510Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:29.810532Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:29.810666Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:30.411399Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:30.560793Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:30.571180Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:30.589677Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.674612Z node 7 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.886169Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.022386Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.037809Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491163937133187:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.037926Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.038302Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491163937133196:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.038357Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.132504Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.178449Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.215370Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.258403Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.296385Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.366967Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.401379Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608491142462294896:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:34.401461Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:34.401669Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.452608Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.545207Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491163937134071:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.545323Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.545936Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491163937134076:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.545977Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491163937134077:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.546044Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.550910Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:34.565897Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608491163937134080:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:34.642081Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608491163937134132:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","Stats":{"ComputeNodes":[{"Tasks":[{"NodeId":7,"FinishTimeMs":1771489897367,"TaskId":1,"Host":"ghrun-fkrokuvqke","ComputeTimeUs":137}],"CpuTimeUs":795}],"UseLlvm":"undefined","Introspections":["1 tasks for upsert\/delete in datashard"],"Tasks":1,"FinishedTasks":1,"Mkql":{"Mkql_TotalNodes":{"Count":1,"Sum":12,"Max":12,"Min":12}},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Join2"}],"BaseTimeMs":1771489897367,"CpuTimeUs":{"Count":1,"Sum":795,"Max":795,"Min":795,"History":[2,795]}},"CTE Name":"precompute_0_0"}],"Node Type":"Effect"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":405826,"CpuTimeUs":398146},"ProcessCpuTimeUs":2422,"TotalDurationUs":415790,"ResourcePoolId":"default","QueuedTimeUs":626},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":0.795,"A-Cpu":0.795,"Name":"Delete","Table":"Join2"}],"Node Type":"Delete"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:31:35.380252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:35.490781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:31:35.515428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:31:35.515533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:31:35.516178Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:31:35.809030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:35.809186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:35.894088Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489892604019 != 1771489892604023 2026-02-19T08:31:35.908820Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:35.959260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:36.058975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:31:36.348351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:36.362039Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:36.466419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.462130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1135:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.462271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2971], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.462350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.463208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1150:2975], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.463318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.467070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:38.491140Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:31:38.646017Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1149:2974], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:31:38.718747Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:1213:3018] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (WaitTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupByFullPk [GOOD] Test command err: 2026-02-19T08:31:22.375014Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491109545390962:2264];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:22.375116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:22.747185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:22.797925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:22.798019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:22.870835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:22.877475Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:22.878429Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491109545390726:2081] 1771489882330663 != 1771489882330666 2026-02-19T08:31:22.997787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:31:23.017971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:23.017992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:23.017998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:23.018095Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:23.373841Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:23.556806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:23.563466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:23.616570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.740817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.909681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.986229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.853783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491122430294496:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.853875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.854210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491122430294506:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.854263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.191143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.222963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.255453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.287394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.317042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.354890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.393677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.451455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.524229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491126725262671:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.524305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.524588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491126725262676:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.524630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491126725262677:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.524722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.528505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:26.540784Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491126725262680:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:26.599767Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491126725262731:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:27.374599Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491109545390962:2264];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:27.376086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:28.168152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:30.170056Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:30.240590Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:30.261588Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:30.261669Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:30.272676Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:30.354453Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:30.354477Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:30.354485Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:30.354569Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:30.429048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:30.770870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:30.788863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:31:30.858356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.003104Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.081349Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.209153Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:33.510742Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491156401870879:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.510825Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.511053Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491156401870889:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.511083Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.576037Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.603632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.630449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.666043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.698083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.736819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.769470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.815663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.904884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491156401871760:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.905016Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.905032Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491156401871765:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.905221Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491156401871767:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.905287Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.908252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:33.917742Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491156401871768:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:34.005210Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491160696839116:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:35.130271Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491143516967188:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:35.130364Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:35.404616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.663672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.044385Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489896050, txId: 281474976710677] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateEmptySum [GOOD] Test command err: 2026-02-19T08:31:14.920907Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491075226347878:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:14.921445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:15.185905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:15.201124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:15.201242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:15.276760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:15.282161Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:15.284569Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491075226347833:2081] 1771489874914117 != 1771489874914120 2026-02-19T08:31:15.356996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:15.391923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:15.391947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:15.391954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:15.392045Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:15.786961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:15.844073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:15.942988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:15.976872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.099297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:16.165932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.204590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491092406218895:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.204732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.205102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491092406218905:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.205162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.529784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.562067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.593377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.632250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.664872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.698614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.729642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.769190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:18.853155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491092406219778:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.853241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.853435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491092406219783:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.853465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491092406219784:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.853502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:18.857155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:18.869184Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491092406219787:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:18.964721Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491092406219840:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:19.921640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491075226347878:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:19.921738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:21.536367Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489881014, txId: 281474976710673] shutting down 2026-02-19T08:31:22.519871Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491111622896638:2218];send_to=[0:730719953 ... ing down 2026-02-19T08:31:29.745002Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491139699873904:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:29.745057Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:29.944165Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:29.969159Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:29.969247Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:29.972909Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:29.977912Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491139699873876:2081] 1771489889744201 != 1771489889744204 2026-02-19T08:31:30.015182Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:30.074207Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:30.074234Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:30.074244Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:30.074341Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:30.177844Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:31:30.521488Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:30.527972Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:30.541081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.615307Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.768752Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:30.781097Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.865853Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.196332Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491156879744925:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.196427Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.196703Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491156879744934:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.196760Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.266841Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.301251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.332945Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.359994Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.391763Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.426966Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.463139Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.514022Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.584026Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491156879745807:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.584116Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.584326Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491156879745813:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.584326Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491156879745812:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.584382Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.588385Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:33.598886Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491156879745816:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:33.691975Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491156879745867:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:34.749834Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491139699873904:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:34.749924Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:36.208481Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489895756, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRange+Unspecified [GOOD] Test command err: 2026-02-19T08:31:23.033440Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491115613396394:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:23.034173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:23.080094Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:23.340221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:23.340346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:23.392076Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:23.410574Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491115613396324:2081] 1771489883025595 != 1771489883025598 2026-02-19T08:31:23.419803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:23.421387Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:23.592298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:23.592323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:23.592329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:23.592436Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:23.599940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:24.040892Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:24.042376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:24.051857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:24.096712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.239019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.392036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.489342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.352578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491128498300091:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.352712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.353172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491128498300101:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.353261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.698361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.739577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.777120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.807271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.838160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.873277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.911419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.978916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.056371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491132793268267:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.056425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.056635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491132793268272:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.056641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491132793268273:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.056675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.060200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:27.071605Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491132793268276:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:27.132517Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491132793268327:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:28.031992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491115613396394:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:28.032055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... --------------------------------- 2026-02-19T08:31:29.734815Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489889288, txId: 281474976710673] shutting down 2026-02-19T08:31:30.613989Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:30.614073Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:31:30.674846Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:30.674931Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:30.689923Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491146068084732:2081] 1771489890546901 != 1771489890546904 2026-02-19T08:31:30.703095Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:30.713419Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:30.794359Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:30.794387Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:30.794396Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:30.794474Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:30.813804Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:31.250191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:31.258419Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:31.276714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.362000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.507972Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.552688Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:31.574680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.936603Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491158952988507:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.936674Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.936965Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491158952988517:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.937019Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.005858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.042976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.095282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.126804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.151625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.187153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.226680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.277670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.351351Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491163247956683:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.351430Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.351505Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491163247956688:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.351639Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491163247956690:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.351687Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.354688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:34.364585Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491163247956692:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:34.439714Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491163247956743:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2026-02-19T08:31:36.637872Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489896309, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::TooManyComputeActors [GOOD] Test command err: 2026-02-19T08:31:24.271117Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491118944624691:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:24.274856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:24.532596Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:24.565204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:24.565517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:24.648873Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:24.653871Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491118944624562:2081] 1771489884258652 != 1771489884258655 2026-02-19T08:31:24.657783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:24.801451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:24.801472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:24.801477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:24.801544Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:24.832933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:25.270992Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:25.297358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:25.362300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.516127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.688226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.757577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.454592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491131829528328:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.454704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.455034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491131829528338:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.455075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.739608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.779130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.821254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.863220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.894693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.928584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.968830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.025907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.115892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491136124496502:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.115969Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.116280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491136124496507:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.116314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491136124496508:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.116418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:28.120164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:28.132814Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491136124496511:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:28.211953Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491136124496562:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:29.266482Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491118944624691:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:29.267654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:29.920732Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489889960, txId: 281474976710673] shutting down 2026-02-19T08:31:30.867211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undeliv ... pp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:30.982824Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:30.996277Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:31.126376Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:31.126400Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:31.126407Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:31.126473Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:31.129810Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:31.551032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:31.559891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:31.574203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.634113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.812710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.857732Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:31.876035Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.934715Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491159293442668:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.934788Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.935008Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491159293442678:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.935042Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.000964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.032688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.060188Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.093418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.126548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.190647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.226748Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.272610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.348482Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491163588410845:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.348598Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.348652Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491163588410850:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.348804Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491163588410852:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.348867Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.351453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:34.360900Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491163588410853:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:34.459228Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491163588410905:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:35.988500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.197855Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=2&id=MTAzOTM3ZmMtN2UwNzQ3OWMtNjY0MTAxZjctMWRiZDQzNDU=, ActorId: [2:7608491172178346025:2560], ActorState: ExecuteState, LegacyTraceId: 01khtgd9rf8fhmmcg28edfsamx, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Requested too many execution units: 21" issue_code: 2029 severity: 1 } trace_id# 2026-02-19T08:31:37.198305Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489897240, txId: 281474976710675] shutting down
: Warning: Type annotation, code: 1030
:7:13: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:8:18: Warning: At function: AssumeColumnOrderPartial, At function: Aggregate, At function: Filter, At lambda, At function: Coalesce
:9:67: Warning: At function: And
:9:39: Warning: At function: <
:9:46: Warning: At function: -
:9:46: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
: Error: Requested too many execution units: 21, code: 2029 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSource [GOOD] Test command err: 2026-02-19T08:31:18.861892Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491094568729860:2064];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:18.863713Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:19.117951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:19.135306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:19.135409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:19.228361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:19.228819Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491094568729836:2081] 1771489878860637 != 1771489878860640 2026-02-19T08:31:19.241736Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:19.358115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:19.358141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:19.358148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:19.358233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:19.358599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:19.827483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:19.843872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:19.874933Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:19.900358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:31:20.049356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.207227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.283741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.184155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491111748600899:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.184289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.190739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491111748600909:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.190830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.501989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.540205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.579615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.609903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.644464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.693308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.776910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.844765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.940225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491111748601787:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.940325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.940622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491111748601792:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.940659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491111748601793:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.940702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.944054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:22.955436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-02-19T08:31:22.955619Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491111748601796:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:23.028665Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491116043569143:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:23.863088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491094568729860:2064];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:23.863161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:24.607130Z ... ath=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:33.848837Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:33.923386Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:33.924909Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491157348835868:2081] 1771489893831190 != 1771489893831193 2026-02-19T08:31:33.996313Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:33.996403Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:34.000508Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:34.078015Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:34.078040Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:34.078049Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:34.078146Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:34.133940Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:34.496048Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:34.503228Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:34.524818Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.608745Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.761986Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.839269Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:34.842511Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.148119Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491174528706926:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.148220Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.148507Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491174528706936:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.148555Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.207342Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.237474Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.263912Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.291118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.319192Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.349312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.382346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.422627Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.490527Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491174528707801:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.490572Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491174528707806:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.490641Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.490811Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491174528707809:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.490857Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.493546Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:37.502773Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491174528707808:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:37.567837Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491174528707861:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:38.833810Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491157348835905:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:38.833884Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:39.200214Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.873145Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489899907, txId: 281474976715675] shutting down >> KqpReturning::ReturningColumnsOrder [GOOD] >> KqpReturning::Random >> KqpSplit::IntersectionLosesRange+Unspecified [GOOD] |95.1%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::LongStringCombiner [GOOD] Test command err: 2026-02-19T08:31:22.050114Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491110503898933:2265];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:22.050171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:22.463864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:22.463941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:22.518479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:22.541106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:22.619307Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491106208931399:2081] 1771489881985038 != 1771489881985041 2026-02-19T08:31:22.623473Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:22.707306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:22.707326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:22.707344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:22.707437Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:22.828010Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:23.053810Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:23.144467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:23.154497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:23.218977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.384225Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.525630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.601989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.251718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491123388802465:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.251836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.252144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491123388802475:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.252192Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.645979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.698974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.737135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.777478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.807121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.851509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.889400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.939467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.023769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491127683770641:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.023899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.024241Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491127683770646:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.024251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491127683770647:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.024301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.027252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:26.037580Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491127683770650:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:26.109608Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491127683770703:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:27.049928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491110503898933:2265];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:27.049977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:27.680410Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491131978738301:2540], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 106 ... de: 2029 2026-02-19T08:31:34.452311Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491163401370612:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:34.454760Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:34.512674Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:34.552402Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:34.557891Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491163401370574:2081] 1771489894450714 != 1771489894450717 2026-02-19T08:31:34.582594Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:34.582677Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:34.594972Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:34.653166Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:34.653195Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:34.653205Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:34.653349Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:34.692243Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:35.082623Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:35.089391Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:35.104114Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.162403Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.307017Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.369789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.486935Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:37.487723Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491176286274331:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.487803Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.488003Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491176286274341:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.488047Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.543452Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.568298Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.588943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.609082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.632648Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.695450Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.726842Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.772585Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.843235Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491176286275216:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.843326Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.843609Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491176286275222:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.843649Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491176286275221:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.843679Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.848179Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:37.863058Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491176286275225:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:37.918423Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491176286275276:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:39.452088Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491163401370612:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:39.452147Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:40.878821Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489899977, txId: 281474976715673] shutting down |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpAgg::AggHashShuffle+UseSink [GOOD] >> KqpAgg::AggHashShuffle-UseSink >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Join2 [GOOD] Test command err: 2026-02-19T08:31:20.211649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491103879858026:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:20.211723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:20.245236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:20.525820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:20.525978Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:20.526095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:20.528557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:20.617662Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491103879857810:2081] 1771489880197315 != 1771489880197318 2026-02-19T08:31:20.664254Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:20.770072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:20.845103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:20.845131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:20.845142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:20.845209Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:21.208064Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:21.389234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:21.396937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-02-19T08:31:21.446961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.616932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.767959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.836589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.747908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491116764761583:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.748063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.748470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491116764761593:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.748514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.106375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.148682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.187808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.239014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.277938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.358610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.406897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.486489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.586314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491121059729766:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.586396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.586719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491121059729771:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.586755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491121059729772:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.586861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.590400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:24.606208Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491121059729775:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:24.699832Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491121059729826:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:25.216700Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491103879858026:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:25.218045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:35.178535Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:35.192695Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:35.192797Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:35.201752Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:35.209831Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491164849457511:2081] 1771489895026133 != 1771489895026136 2026-02-19T08:31:35.216795Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:35.259574Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:35.259598Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:35.259606Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:35.259687Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:35.403805Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:35.672791Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:35.687019Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.741193Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.942081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.018843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.032614Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:38.129852Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491177734361274:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.129949Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.130267Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491177734361284:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.130334Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.195451Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.230403Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.257188Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.289195Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.319201Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.351657Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.392471Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.438103Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.522290Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491177734362155:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.522401Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.523034Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491177734362160:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.523108Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491177734362161:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.523217Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.527470Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:38.543156Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-02-19T08:31:38.544418Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491177734362164:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:38.610381Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491177734362215:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:40.027615Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491164849457543:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.027692Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:40.265967Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.217962Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489901167, txId: 281474976715675] shutting down >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::EmptySet_3 [GOOD] Test command err: 2026-02-19T08:31:21.733181Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491105287027821:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:21.735741Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:21.966390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:21.966475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:22.000693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:22.035522Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:22.040440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:22.189624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:22.199439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:22.199469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:22.199475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-02-19T08:31:22.199811Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:22.681293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:22.692646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:22.741831Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:22.744352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.934889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.104714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.186442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.995516Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491118171931527:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.995660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.996036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491118171931537:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:24.996109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.310766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.336862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.381203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.421508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.457876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.498394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.541697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.618538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.707399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491122466899702:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.707505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.707672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491122466899707:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.707698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491122466899708:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.707736Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.711589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:25.724648Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491122466899711:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:25.791310Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491122466899764:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:26.732099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491105287027821:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:26.732159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:28.197149Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489888203, txId: 281474976710673] shutting down 2026-02-19T08:31:28.988739Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491137071054905:2065];send_to=[0:7307199536658146131:776 ... PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:35.951034Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:35.959231Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:35.981702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.046011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.266674Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.330435Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.497928Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:38.726314Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491180746933914:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.726443Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.726937Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491180746933924:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.726986Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.805751Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.841310Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.871909Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.907286Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.959118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.003244Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.077513Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.131701Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.225285Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491185041902092:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.225389Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.225778Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491185041902097:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.225819Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491185041902098:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.225933Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.230021Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:39.242338Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491185041902101:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:39.308712Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491185041902152:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:40.345658Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491167862030185:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.345737Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (declare $key (DataType 'Uint64)) (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (OptionalType (DataType 'Uint64))) (DataType 'Int32)) (TupleType (OptionalType (OptionalType (DataType 'Uint64))) (DataType 'Int32)))))) (let $1 (DataType 'Uint64)) (let $2 (OptionalType $1)) (let $3 (OptionalType $2)) (let $4 (DataType 'Int32)) (let $5 '('"_partition_mode" '"single")) (let $6 '('('"_logical_id" '988) '('"_id" '"f8f0e705-526b6daf-365606fb-ff186843") $5)) (let $7 (DqPhyStage '() (lambda '() (block '( (let $31 '((Nothing $3) (Int32 '0))) (let $32 (TupleType $4 $3 $4)) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (If (== (Int32 '1) $key) (RangeCreate (AsList '($31 $31))) (List (ListType (TupleType $32 $32))))))))))) ))) $6)) (let $8 (DqCnValue (TDqOutput $7 '0))) (let $9 (KqpPhysicalTx '($7) '($8) '('('"$key")) '('('"type" '"compute")))) (let $10 (KqpTable '"/Root/EightShard" '"72057594046644480:39" '"" '1)) (let $11 '('"Key")) (let $12 '"%kqp%tx_result_binding_0_0") (let $13 (TupleType $3 $4)) (let $14 (TupleType (ListType (TupleType $13 $13)))) (let $15 '('('"UsedKeyColumns" $11) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $16 (KqpRowsSourceSettings $10 $11 '() %kqp%tx_result_binding_0_0 $15)) (let $17 (lambda '($33) $33)) (let $18 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $16)) $17 '('('"_logical_id" '1032) '('"_id" '"55b4c080-5c5fa2b5-97d00b03-735273fb")))) (let $19 (ListType (StructType '('"Key" $2)))) (let $20 '('('"_logical_id" '1053) '('"_id" '"6f28676b-12250e-5d1cfc04-5ac24986") $5)) (let $21 (DqPhyStage '() (lambda '() (Iterator (List $19))) $20)) (let $22 (DqCnUnionAll (TDqOutput $18 '0))) (let $23 (DqPhyStage '($22) $17 '('('"_logical_id" '1216) '('"_id" '"cdf5e80b-4e90e509-328ece9a-92516e35")))) (let $24 '($18 $21 $23)) (let $25 (DqCnResult (TDqOutput $23 '0) $11)) (let $26 (DqCnResult (TDqOutput $21 '0) $11)) (let $27 (KqpTxResultBinding $14 '0 '0)) (let $28 (KqpPhysicalTx $24 '($25 $26) '('($12 $27)) '('('"type" '"generic")))) (let $29 (KqpTxResultBinding $19 '1 '0)) (let $30 (KqpTxResultBinding $19 '1 '1)) (return (KqpPhysicalQuery '($9 $28) '($29 $30) '('('"type" '"query")))) ) >> KqpRanges::NoFullScanAtScanQuery [GOOD] >> KqpRanges::IsNotNullInJsonValue2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpSplit::IntersectionLosesRange+Unspecified [GOOD] Test command err: 2026-02-19T08:31:29.178251Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491140378212909:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:29.178476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:29.441130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:29.473506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:29.473576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:29.533314Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:29.535454Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491140378212861:2081] 1771489889159799 != 1771489889159802 2026-02-19T08:31:29.552059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:29.673393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:29.673414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:29.673425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:29.673509Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:29.690942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:30.037933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:30.044532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:30.094312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.192303Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:30.245572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.386080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.445339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.414659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491153263116629:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:32.414834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:32.416000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491153263116639:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:32.416051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:32.739239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.773113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.808473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.842849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.874523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.909848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.957786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.006253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.085768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491157558084802:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.085855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.086133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491157558084807:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.086189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491157558084808:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.086224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:33.089890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:33.101463Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491157558084811:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:33.186956Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491157558084862:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:34.182199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491140378212909:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:34.182268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710675 SchemeShardStatus: 1 S ... our snapshot: [step: 1771489895238, txId: 281474976710673] shutting down 2026-02-19T08:31:36.071740Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491172760397059:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:36.072825Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:36.101546Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:36.188110Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:36.188192Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:36.190924Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:36.203767Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:36.270359Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:36.270382Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:36.270391Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:36.270465Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:36.391815Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:36.697006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:36.704407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.742256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.867881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.922240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.077019Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:38.986415Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491181350333462:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.986496Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.986928Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491181350333472:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.986982Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.053346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.087808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.120320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.160812Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.202055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.292617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.337909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.394008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.478626Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491185645301647:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.478744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.478869Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491185645301652:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.478923Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491185645301653:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.479063Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.483521Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:39.493811Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491185645301656:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:39.580176Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491185645301707:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:41.071656Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491172760397059:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:41.071725Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715675 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2026-02-19T08:31:41.754594Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489901741, txId: 281474976715673] shutting down >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> KqpScan::SqlInParameter [GOOD] >> KqpScan::SqlInLiteral >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] >> KqpStats::StatsProfile [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal >> KqpQuery::QueryClientTimeout >> KqpParams::RowsList >> KqpStats::RequestUnitForBadRequestExecute >> KqpExplain::PrecomputeRange >> KqpParams::ImplicitParameterTypes >> KqpLimits::WaitCAsStateOnAbort >> KqpTypes::UnsafeTimestampCastV0 >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal >> TSchemeShardViewTest::AsyncCreateDifferentViews >> KqpScan::SimpleWindow [GOOD] >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs [GOOD] >> KqpQuery::CreateAsSelect_DisableDataShard >> KqpScan::AggregateWithFunction [GOOD] >> KqpQuery::Now ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] Test command err: 2026-02-19T08:30:41.026966Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490933418184142:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:41.027133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:41.074256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:30:41.322490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:41.322634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:41.331879Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:41.387859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:41.429994Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:41.437683Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490933418184109:2081] 1771489841024936 != 1771489841024939 2026-02-19T08:30:41.507603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:41.507631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:41.507644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:41.507734Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:41.547358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:42.050021Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:42.086183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:42.091671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:44.097630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490946303086895:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.097925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.098342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490946303086905:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.098508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.367275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:44.468446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490946303086997:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.468537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.468723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490946303087002:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.468751Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490946303087003:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.468779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.471998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:44.481325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490946303087006:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:30:44.603313Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490946303087057:2597] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:44.759510Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490946303087098:2355], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2026-02-19T08:30:44.760027Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ODVkZjc0ZGYtM2MzY2FlZDYtMzM3ZjBkNWEtOGFmZjhiOTc=, ActorId: [1:7608490946303086890:2326], ActorState: ExecuteState, LegacyTraceId: 01khtgbqg11bmnzkqrgdsk299y, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 14 } message: "At function: KiWriteTable!" end_position { row: 1 column: 14 } severity: 1 issues { position { row: 1 column: 14 } message: "Missing not null column in input: Value. All not null columns should be initialized" end_position { row: 1 column: 14 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:44.787786Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490946303087107:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:47: Error: Failed to convert 'Value': Null to String
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:30:44.788269Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ODVkZjc0ZGYtM2MzY2FlZDYtMzM3ZjBkNWEtOGFmZjhiOTc=, ActorId: [1:7608490946303086890:2326], ActorState: ExecuteState, LegacyTraceId: 01khtgbqh0b87fgtbk1nym4sbv, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 14 } message: "At function: KiWriteTable!" end_position { row: 1 column: 14 } severity: 1 issues { position { row: 1 column: 47 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value\':Null> to Struct<\'Key\':Uint64?,\'Value\':String>" end_position { row: 1 column: 47 } severity: 1 issues { position { row: 1 column: 47 } message: "Failed to convert \'Value\': Null to String" end_position { row: 1 column: 47 } severity: 1 } } issues { position { row: 1 column: 47 } message: "Failed to convert input columns types to scheme types" end_position { row: 1 column: 47 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:30:45.600980Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490950549176853:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:45.601089Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:45.627325Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:45.714089Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:45.715432Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490950549176822:2081] 1771489845599929 != 1771489845599932 2026-02-19T08:30:45.749534Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:45.749624Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:45.753542Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:45.828313Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:45.828338Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe ... 
ion tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:34.072640Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:34.072718Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:34.092260Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:34.102204Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:34.140188Z node 8 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:31:34.150823Z node 8 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:31:34.196081Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:34.196103Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:34.196114Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:34.196211Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:34.221384Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:34.932618Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:34.953584Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:34.976072Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.050960Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.234811Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.394936Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.625171Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491180214276975:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.625292Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.625833Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491180214276984:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.625904Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.731705Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.778796Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.821784Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.867598Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.927205Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491158739438680:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:38.927307Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:38.947809Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.004858Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.047779Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.117702Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.235552Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491184509245161:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.235644Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.235816Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491184509245166:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.235869Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491184509245167:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.236004Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.240504Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:39.254593Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491184509245170:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:39.311576Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491184509245221:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:41.527657Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.009077Z node 8 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489902042, txId: 281474976710675] shutting down 2026-02-19T08:31:42.246794Z node 8 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489902280, txId: 281474976710677] shutting down 2026-02-19T08:31:42.475292Z node 8 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489902504, txId: 281474976710679] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::NoFullScanAtScanQuery [GOOD] Test command err: 2026-02-19T08:30:17.158620Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490832740516982:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:17.158710Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:17.403910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:17.432262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:17.432350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:17.500345Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:17.504158Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490832740516727:2081] 1771489817128910 != 1771489817128913 2026-02-19T08:30:17.520135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:17.578815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:17.614431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:17.614452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2026-02-19T08:30:17.614462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:17.614558Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:18.023821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:18.080607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:18.127640Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:18.210915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:18.357929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:18.411337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.195281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490845625420503:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.195390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.195800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490845625420513:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.195838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.554710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.584457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.623068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.661265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.692065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.774663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.816736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.865481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:20.945832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490845625421383:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.945926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.946286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490845625421388:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.946345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490845625421389:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.946407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:20.951949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:20.965269Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490845625421392:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:21.091046Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490849920388739:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:22.155703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490832740516982:2268];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:22.155750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:22.321806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:22.569903Z no ... ptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:33.983542Z node 10 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:33.987406Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7608491156584864006:2081] 1771489893863421 != 1771489893863424 2026-02-19T08:31:34.010184Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:34.010320Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:34.030968Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:34.095300Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:34.095328Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:34.095339Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:34.095440Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:34.270400Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:34.623024Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:34.633124Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.695536Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.876156Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:34.940339Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.030796Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.556559Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7608491178059702361:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.556692Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.557287Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7608491178059702371:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.557354Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.659624Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.707769Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.763466Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.810761Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.855725Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.865335Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7608491156584864047:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:38.865428Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:38.935200Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.986319Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.055069Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.186177Z node 10 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7608491182354670545:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.186331Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.187101Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7608491182354670551:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.187172Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7608491182354670550:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.187330Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.192134Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:39.211112Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7608491182354670554:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:39.314849Z node 10 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [10:7608491182354670605:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:41.661726Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.759063Z node 10 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489902798, txId: 281474976710675] shutting down ---------QUERY---------- --!syntax_v1 SELECT * FROM `/Root/TableWithIntKey` WHERE Key1 IN (1, 2, 100, 101, 102, 200, 201, 201, 1000, 1001, 1002, 2000, 2001, 2002) AND (Key1 > 2000) ORDER BY Key1; ---------RESULT--------- [[[2001];#];[[2002];[2]]] ------------------------ ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::IsNotNullInJsonValue2 [GOOD] Test command err: 2026-02-19T08:30:41.569762Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490935577688522:2194];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:41.569822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:41.793445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:41.802677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:41.802764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:41.897553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:41.926667Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:41.932628Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490935577688357:2081] 1771489841561974 != 1771489841561977 2026-02-19T08:30:41.962065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:42.090857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:42.090879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:42.090885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-02-19T08:30:42.090976Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:42.512043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:42.521019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:42.558005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.578541Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:42.726895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.872743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:42.929880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:44.757745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490948462592129:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.757868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.758172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490948462592139:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:44.758232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.156740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.199000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.232522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.264787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.296238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.328108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.363870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.427903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.504352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490952757560305:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.504531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.504630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490952757560310:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.504688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490952757560312:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.504722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.508328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:45.520191Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490952757560314:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:45.597595Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490952757560365:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:46.569508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490935577688522:2194];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:46.569584Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:47.222274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... : PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:33.474566Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:33.474602Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:33.474614Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:33.474729Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:34.121018Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:34.137072Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:34.148602Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.238800Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.242236Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:34.433777Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:31:34.517045Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.782088Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491173911284004:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.782207Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.782475Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491173911284013:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.782530Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:37.891920Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.936670Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.976253Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.014039Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.051656Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.091944Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.141806Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.200266Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.246856Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7608491156731413096:2180];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:38.249103Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:38.328493Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491178206252183:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.328595Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.328669Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491178206252188:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.328890Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7608491178206252190:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.328946Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:38.333007Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:38.346480Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7608491178206252191:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:38.411564Z node 7 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [7:7608491178206252244:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:40.857014Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.175769Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.387416Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.606331Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.043193Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> KqpParams::MissingParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] Test command err: 2026-02-19T08:30:39.154534Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490927712787849:2179];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:39.154813Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:39.370027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:39.411920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:39.412047Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:39.447258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:39.449966Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:39.549028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:39.549054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:39.549060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:39.549149Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:39.593796Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:39.946645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:39.956906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:40.157012Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:42.228166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490940597690474:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:42.228315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:42.228704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490940597690486:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:42.228758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490940597690487:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:42.228892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:42.232700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:42.244140Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490940597690490:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:30:42.349195Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490940597690541:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:43.439978Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490942009694181:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:43.440070Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:43.452739Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:43.590891Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:43.590964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:43.605636Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608490942009694145:2081] 1771489843439136 != 1771489843439139 2026-02-19T08:30:43.622575Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:43.625690Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:43.681543Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:43.702387Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:43.702414Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:43.702420Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:43.702488Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:44.112040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:44.449786Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:46.384493Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490954894596921:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.384588Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.385028Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490954894596930:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.385083Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.422779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.548277Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490954894597027:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.548353Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.548425Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490954894597032:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.548554Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608490954894597034:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.548615Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.551842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:46.586418Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608490954894597036:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for ... { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:34.038127Z node 7 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037892 Cancelled read: {[7:7608491164140190000:2549], 2} 2026-02-19T08:31:35.390098Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608491168186566817:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:35.393473Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:35.412108Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:35.506057Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:35.508202Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608491168186566770:2081] 1771489895382614 != 1771489895382617 2026-02-19T08:31:35.535580Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:35.535837Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:35.556895Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:35.630490Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:35.630513Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:35.630522Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:35.630598Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:35.696794Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:36.274060Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:36.284706Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.355237Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.474860Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:36.525729Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:36.607159Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.964509Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491185366437832:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.964631Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.964951Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491185366437842:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:39.965006Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.035711Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.073477Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.111013Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.148309Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.186869Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.226383Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.266442Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.342700Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.395934Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491168186566817:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.396211Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:40.444971Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491189661406005:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.445080Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.445389Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491189661406010:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.445455Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491189661406011:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.445602Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.451087Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:40.463865Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491189661406014:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:40.546847Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491189661406067:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpExplain::UpdateSecondaryConditional+UseSink-UseStreamIndex >> KqpScan::UnionAggregate [GOOD] >> KqpNamedExpressions::NamedExpressionRandomDataQuery+UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomDataQuery-UseSink >> KqpScan::PrunePartitionsByExpr [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::SimpleWindow [GOOD] Test command err: 2026-02-19T08:31:22.960931Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491112110956694:2245];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:22.961092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:23.226100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:23.257981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:23.258111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:23.337815Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491112110956476:2081] 1771489882932838 != 1771489882932841 2026-02-19T08:31:23.344604Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:23.355406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:23.458002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:23.498524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:23.498567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:23.498577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:23.498684Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:23.974172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:24.024211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:24.034669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:24.084329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.238469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.415069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:24.497601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.479786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491129290827532:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.479894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.480378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491129290827542:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.480424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:26.777978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.814859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.845324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.874098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.907310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.950910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:26.988175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.029810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:27.094012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491133585795708:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.094090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.094174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491133585795713:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.094219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491133585795715:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.094267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:27.097545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:27.108162Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491133585795717:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:27.177273Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491133585795768:3759] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:27.960599Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491112110956694:2245];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:27.960665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:28.656019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... 9T08:31:36.910375Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489896946, txId: 281474976710675] shutting down 2026-02-19T08:31:37.737247Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491176848719204:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:37.737308Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:37.749779Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:37.817989Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:37.819628Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491176848719173:2081] 1771489897736357 != 1771489897736360 2026-02-19T08:31:37.857922Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:37.858014Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:37.859757Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:37.900853Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:37.900881Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:37.900890Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:37.900970Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:37.928139Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:31:38.442953Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:38.464610Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.547221Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.788020Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:38.797132Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.874877Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.318629Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491194028590227:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.318703Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.319090Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491194028590236:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.319131Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.387652Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.430587Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.458952Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.498883Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.535416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.571316Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.604998Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.644715Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.729107Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491194028591107:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.729193Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.729417Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491194028591112:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.729476Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491194028591113:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.729592Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.733671Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:41.747193Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491194028591116:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:41.843474Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491194028591169:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:42.738785Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491176848719204:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:42.738857Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:44.067420Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489904093, txId: 281474976710673] shutting down >> KqpNewEngine::DqSourceLocksEffects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateWithFunction [GOOD] Test command err: 2026-02-19T08:31:19.498366Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491098667749820:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:19.498808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:19.797915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:19.869085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:19.869165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:19.891841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:19.906719Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:19.907366Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491098667749779:2081] 1771489879493663 != 1771489879493666 2026-02-19T08:31:20.044196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:20.044221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:20.044227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:20.044335Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:20.059622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:20.500896Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:20.512066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:20.596395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.754778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:20.954430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:21.038313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.926816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491111552653550:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.926943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.927331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491111552653560:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:22.927387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.263862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.301653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.337045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.371065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.405653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.438655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.515474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.575431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.681442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491115847621734:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.681543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.681889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491115847621739:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.681953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491115847621740:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.682002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:23.685625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:23.702330Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491115847621743:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:23.776396Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491115847621794:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:24.505830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491098667749820:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:24.505911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:28.240691Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489886425, txId: 281474976710673] shutting down 2026-02-19T08:31:29.258161Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491142015136457:2194];send_to=[0:730719953 ... ing down 2026-02-19T08:31:37.657010Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491174896859108:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:37.657065Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:37.668309Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:37.735383Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:37.737199Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491174896859073:2081] 1771489897656181 != 1771489897656184 2026-02-19T08:31:37.772144Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:37.772208Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:37.773321Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:37.838219Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:37.838243Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:37.838252Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:37.838338Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:37.948614Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:31:38.217611Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:38.225182Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:38.236445Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.298700Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.456836Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.524770Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.674161Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:40.953846Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491187781762834:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.953948Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.958012Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491187781762845:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.958098Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.021091Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.061246Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.137139Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.172113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.230494Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.290075Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.329048Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.378102Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.481841Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491192076731011:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.481924Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.482347Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491192076731016:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.482391Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491192076731017:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.482505Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.486323Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:41.499911Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491192076731020:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:41.572494Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491192076731071:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:42.657396Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491174896859108:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:42.657479Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:44.252597Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489903792, txId: 281474976710673] shutting down >> KqpScan::Join3Tables [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::FullOuterJoin >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RowsLimit >> KqpQuery::QuerySkipHasNoColumns [GOOD] >> KqpQuery::QueryResultsTruncated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:45.394503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:45.394602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:45.394669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:45.394704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:45.394746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:45.394803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:45.394832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:45.394860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:45.394928Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:45.395001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:45.395816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:45.402764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:45.507951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:45.508013Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.523744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:45.526745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:45.526954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:45.550538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:45.558097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:45.558902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:45.559292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:45.570097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:45.570344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:45.571670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:45.571739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:45.571848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:45.571907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:45.571956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:45.572147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:45.736351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.737364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.737481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.737564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.737725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.737801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.737891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.738015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.738140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.738217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.738283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.738366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.738435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:45.738497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:31:46.726861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:31:46.726893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:31:46.726925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:31:46.726991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:31:46.729323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:31:46.730240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2026-02-19T08:31:46.730578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:31:46.730625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-02-19T08:31:46.730737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:31:46.730762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-02-19T08:31:46.730840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:31:46.730864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:31:46.731269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:31:46.731472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:31:46.731515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:736:2724] 2026-02-19T08:31:46.731804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:31:46.731896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 
72057594046678944 2026-02-19T08:31:46.731973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:31:46.732000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:736:2724] 2026-02-19T08:31:46.732095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:31:46.732117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:736:2724] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-02-19T08:31:46.732561Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:31:46.732765Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 219us result status StatusSuccess 2026-02-19T08:31:46.733283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2026-02-19T08:31:46.733827Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:31:46.734065Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 180us result status StatusSuccess 2026-02-19T08:31:46.734428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:46.734912Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:31:46.735191Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 209us result status StatusSuccess 2026-02-19T08:31:46.735517Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::UnionAggregate [GOOD] Test command err: 2026-02-19T08:31:32.220953Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491153927505847:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:32.221663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:32.462884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:32.462982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:32.501348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:32.524118Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491153927505816:2081] 1771489892219024 != 1771489892219027 2026-02-19T08:31:32.538698Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:32.543544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:32.628435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:32.628461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:32.628471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:32.628563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:32.785359Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:33.039814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:33.046408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:33.099120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.225213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.230247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:33.378220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:33.445383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.254618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491166812409583:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.254740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.256257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491166812409593:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.256326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.520033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.545929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.577654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.609899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.634114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.668437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.707951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.765479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.845425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491166812410463:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.845546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.845835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491166812410468:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.845876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491166812410469:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.845979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.850682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:35.862803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491166812410472:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:35.931733Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491166812410523:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:37.221221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491153927505847:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:37.221284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:37.618936Z node 1 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [1:7608491175402345501:2055], tablet: [1:7608491158222474132:2317], scanId: 5, table: /Root/EightShar ... 02-19T08:31:38.486947Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:38.498535Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:38.588745Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:38.589072Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:38.590213Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491178699386286:2081] 1771489898485920 != 1771489898485923 2026-02-19T08:31:38.612464Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:38.612544Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:38.623231Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:38.757552Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:38.757575Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:38.757583Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:38.757655Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:38.793413Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:39.156916Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:39.163176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:39.176526Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.247861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.394741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.472797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.584647Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:41.520027Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491191584290043:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.520144Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.520407Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491191584290053:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.520450Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.601537Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.638039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.722445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.755430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.784724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.815156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.845803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.888654Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.959744Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491191584290925:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.959815Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.959888Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491191584290930:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.960018Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491191584290932:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.960062Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.963256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:41.974271Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491191584290934:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:42.068517Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491195879258281:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:43.487097Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491178699386317:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:43.487167Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:45.409182Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489904520, txId: 281474976710673] shutting down >> KqpQuery::QueryCache [GOOD] >> KqpQuery::QueryCacheTtl >> KqpExplain::UpdateConditional+UseSink [GOOD] >> KqpExplain::UpdateConditional-UseSink >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query [GOOD] >> KqpNewEngine::IndexAutochooserTopSortDisabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::PrunePartitionsByExpr [GOOD] Test command err: 2026-02-19T08:31:27.238916Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491133747596626:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:27.240299Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:27.264538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:27.525469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:27.525562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:27.597681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:27.608335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:27.638805Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:27.644547Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491133747596596:2081] 1771489887237112 != 1771489887237115 2026-02-19T08:31:27.767238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:27.767258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:27.767271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-02-19T08:31:27.767365Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:27.784103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:28.218340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:28.247082Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:28.293051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.435992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.606102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:28.681242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.616763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491146632500359:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.616901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.620208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491146632500369:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.620281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:30.921064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:30.973692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.008033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.040902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.069523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.111183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.153119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.214322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:31.297771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491150927468534:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.297875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.298019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491150927468539:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.298063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491150927468541:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.298171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:31.303399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:31.313528Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491150927468543:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:31.396789Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491150927468594:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:32.239149Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491133747596626:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:32.239244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:33.132712Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489893166, tx ... 9T08:31:38.684965Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489898682, txId: 281474976715673] shutting down 2026-02-19T08:31:39.688295Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491184394512662:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:39.688425Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:39.711120Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:39.814504Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:39.817445Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491184394512630:2081] 1771489899686698 != 1771489899686701 2026-02-19T08:31:39.832985Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:39.833073Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:39.853546Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:39.871337Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:39.900039Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:39.900059Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:39.900069Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:39.900146Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:40.264327Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:40.281205Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.343064Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.539746Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.609928Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.724142Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:43.157869Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491201574383686:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.158012Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.159390Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491201574383696:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.159492Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.216513Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.289737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.334163Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.363800Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.399254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.433283Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.472579Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.521822Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.600937Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491201574384568:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.601044Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.601937Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491201574384573:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.601995Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491201574384574:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.602035Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.605489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:43.618233Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491201574384577:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:43.690435Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491201574384628:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:44.688913Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491184394512662:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:44.688994Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:45.603526Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489905626, txId: 281474976715673] shutting down >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::UdfTerminate >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinNoStatsScan >> KqpMergeCn::SortBy_Int32 [GOOD] |95.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::Join3Tables [GOOD] Test command err: 2026-02-19T08:31:21.687956Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491106894841495:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:21.688013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:22.011692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:22.011788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:22.030797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:22.040367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:22.093912Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:22.097950Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491106894841465:2081] 1771489881685013 != 1771489881685016 2026-02-19T08:31:22.156769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:22.156810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:22.156825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-02-19T08:31:22.156905Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:22.315857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:22.627418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:22.638516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:22.708978Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:22.720350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:22.888667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.044410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:23.121587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.029057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491124074712522:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.029168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.035418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491124074712531:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.035487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.349941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.385207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.423321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.458141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.532118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.575419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.618181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.700309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:25.782916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491124074713410:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.782993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.783309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491124074713415:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.783351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491124074713416:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.783450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:25.787074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:25.799250Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491124074713419:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:25.888715Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491124074713470:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:26.689825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491106894841495:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:26.689923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:27.408743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... eScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:38.223375Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:38.224552Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491179330880408:2081] 1771489898088369 != 1771489898088372 2026-02-19T08:31:38.237439Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:38.237750Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:38.262011Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:38.314578Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:38.314601Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:38.314610Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:38.314700Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:38.349960Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:38.798894Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:38.804960Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:38.812368Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.872424Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.073600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.118383Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:39.144479Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.353297Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491192215784169:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.353394Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.353703Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491192215784179:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.353797Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.430079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.464639Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.501697Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.542716Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.579518Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.659906Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.737488Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.785724Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.866044Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491192215785054:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.866129Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.866311Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491192215785059:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.866348Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491192215785060:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.866388Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.871127Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:41.887969Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491192215785063:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:41.944199Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491192215785114:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:43.097826Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491179330880518:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:43.098599Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:43.629960Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.022618Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489905031, txId: 281474976715675] shutting down 2026-02-19T08:31:46.211888Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489906228, txId: 281474976715677] shutting down |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLocksEffects [GOOD] Test command err: 2026-02-19T08:30:43.270227Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490942884415881:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:43.272388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:43.481781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:43.504818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:43.504910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:43.560113Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490942884415824:2081] 1771489843264529 != 1771489843264532 2026-02-19T08:30:43.580198Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:43.585668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:43.724842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:43.724863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:43.724870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:43.725253Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:43.760598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:44.128395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:44.179177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:44.275070Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:44.312490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:44.441547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:44.506002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.292622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490955769319591:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.292721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.293656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490955769319601:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.293723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.562165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.591268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.621284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.648961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.676352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.707904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.740819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.785653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.853551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490955769320466:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.853629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.853686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490955769320471:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.853805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490955769320473:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.853843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.857153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:46.868330Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490955769320475:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:46.932652Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490955769320526:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:48.266678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490942884415881:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:48.266744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:49.562090Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490969526649777:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:49.562142Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... 80, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.653686Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.777647Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:37.822595Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:37.901208Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.599234Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491193051407594:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.599345Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.599727Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491193051407604:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.599790Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.697644Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.746118Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.760942Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491171576569273:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:41.761022Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:41.789026Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.840299Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.886935Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.937814Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.985022Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.045330Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.131202Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491197346375775:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.131316Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.131383Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491197346375780:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.131548Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491197346375782:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.131596Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.135784Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:42.147818Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491197346375784:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:42.217278Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491197346375835:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:45.065999Z node 8 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2026-02-19T08:31:45.079863Z node 8 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-02-19T08:31:45.080088Z node 8 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-02-19T08:31:45.080346Z node 8 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:981: SelfId: [8:7608491210231278114:2540], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [8:7608491205936310748:2540]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[8:7608491210231278114:2540].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-02-19T08:31:45.081030Z node 8 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [8:7608491210231278108:2540], SessionActorId: [8:7608491205936310748:2540], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[8:7608491205936310748:2540]. 2026-02-19T08:31:45.081367Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=8&id=YmU3OGMwYjgtY2NiYWQ0ODEtZDZkNjc3OWMtMmMzYmJlZg==, ActorId: [8:7608491205936310748:2540], ActorState: ExecuteState, LegacyTraceId: 01khtgdjaa8tk3cw336dd402kj, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [8:7608491210231278109:2540] from: [8:7608491210231278108:2540] trace_id# 2026-02-19T08:31:45.081505Z node 8 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [8:7608491210231278109:2540] TxId: 281474976715675. Ctx: { TraceId: 01khtgdjaa8tk3cw336dd402kj, Database: /Root, SessionId: ydb://session/3?node_id=8&id=YmU3OGMwYjgtY2NiYWQ0ODEtZDZkNjc3OWMtMmMzYmJlZg==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# [ {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 } {
: Error: Operation is aborting because locks are not valid, code: 2001 } ] trace_id# 2026-02-19T08:31:45.081971Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=8&id=YmU3OGMwYjgtY2NiYWQ0ODEtZDZkNjc3OWMtMmMzYmJlZg==, ActorId: [8:7608491205936310748:2540], ActorState: ExecuteState, LegacyTraceId: 01khtgdjaa8tk3cw336dd402kj, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TwoShard`." issue_code: 2001 severity: 1 }{ message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table >> KqpExplain::UpdateSecondaryConditional-UseSink-UseStreamIndex [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink-UseStreamIndex >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal >> TNodeQueryTextCacheTest::AddAndGet [GOOD] >> TNodeQueryTextCacheTest::SameSpanIdDifferentTextUpdatesIndex [GOOD] >> TNodeQueryTextCacheTest::GetMissingReturnsEmpty [GOOD] >> TNodeQueryTextCacheTest::IgnoresZeroSpanIdAndEmptyQueryText [GOOD] >> TNodeQueryTextCacheTest::EvictsOldestEntryWhenFull [GOOD] |95.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest |95.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::IndexAutochooserTopSortDisabled [GOOD] Test command err: 2026-02-19T08:30:37.264696Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490916402661179:2063];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:37.264877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:37.535653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:37.560989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:37.561108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:37.603589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:37.633595Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:37.634797Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490916402661157:2081] 1771489837262987 != 1771489837262990 2026-02-19T08:30:37.712217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:37.712240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:37.712247Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:37.712337Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:37.729422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:38.112875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:38.156625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.272622Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:38.316995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.488673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:38.551998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.362519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490929287564921:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.362633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.363020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490929287564931:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.363108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:40.677238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.706113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.740275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.771981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.809160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.880385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.940044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:40.988315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.057128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490933582533099:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.057202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.057502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490933582533105:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.057524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490933582533104:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.057544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.061435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:41.075712Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490933582533108:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:41.147806Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490933582533159:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:42.266158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490916402661179:2063];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:42.266225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:44.152286Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490946188073332:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:44.152402Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... tion type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.568018Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.791028Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.892670Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.405624Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491179356047596:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:43.405690Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:43.434071Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491200830885921:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.434211Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.434595Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491200830885931:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.434657Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.522380Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.564808Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.602565Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.638371Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.677738Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.718593Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.759459Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.817583Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.956038Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491200830886799:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.956172Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.956731Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491200830886804:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.956796Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491200830886805:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.956973Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.962685Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:43.984653Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491200830886808:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:44.051226Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491205125854155:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:46.458389Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["my_table"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/my_table","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"my_table","ReadColumns":["id (-∞, +∞)","a","b","c"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"TopSort","Limit":"10","TopSortBy":"[row.a,row.b]"}],"Node Type":"TopSort"}],"Node Type":"Merge","SortColumns":["a (Asc)","b (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/my_table","reads":[{"columns":["a","b","c","id"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"my_table","ReadColumns":["id (-∞, +∞)","a","b","c"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"10","TopSortBy":"[row.a,row.b]"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ( (let $1 (KqpTable '"/Root/my_table" '"72057594046644480:54" '"" '1)) (let $2 '('"a" '"b" '"c" '"id")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (Uint64 '10)) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"a" $5) '('"b" $5) '('"c" $5) '('"id" (OptionalType (DataType 'Uint64))))) (let $7 '('('"_logical_id" '406) '('"_id" '"46e3fb2-624dbc9c-88e361f0-810a4cef") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (Bool 'true)) (let $14 (TopSort (ToFlow $12) $4 '($13 $13) (lambda '($16) '((Member $16 '"a") (Member $16 '"b"))))) (let $15 (lambda '($17) (Member $17 '"a") (Member $17 '"b") (Member $17 '"c") (Member $17 '"id"))) (return (FromFlow (ExpandMap $14 $15))) ))) $7)) (let $9 (DqCnMerge (TDqOutput $8 '0) '('('0 '"Asc") '('1 '"Asc")))) (let $10 (DqPhyStage '($9) (lambda '($18) (FromFlow (NarrowMap (Take (ToFlow $18) $4) (lambda '($19 $20 $21 $22) (AsStruct '('"a" $19) '('"b" $20) '('"c" $21) '('"id" $22)))))) '('('"_logical_id" '419) '('"_id" 
'"758c7821-2ca57fef-e1680ed6-9a3b79fb")))) (let $11 (DqCnResult (TDqOutput $10 '0) '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $6) '0 '0)) '('('"type" '"query")))) ) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_Int32 [GOOD] Test command err: 2026-02-19T08:30:38.067595Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490923665220297:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:38.070295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:38.281814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:38.318098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:38.318246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:38.374080Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:38.375396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:38.377471Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490923665220264:2081] 1771489838065417 != 1771489838065420 2026-02-19T08:30:38.526054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:38.538239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:38.538271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:38.538284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:38.538360Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:38.981809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:38.986744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:30:39.045027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.100248Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:39.167313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.313196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:39.374885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.175689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490936550124037:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.175777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.176123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490936550124047:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.176162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.515961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.543289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.570829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.597821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.622282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.652477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.689898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.737602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:41.837973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490936550124911:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.838049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.838125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490936550124916:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.838346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490936550124918:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.838385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:41.841330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:41.850647Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490936550124919:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:41.905611Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490936550124971:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:43.069817Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490923665220297:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:43.069876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:43.510759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... ath=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:37.799056Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:37.881878Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:37.883980Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608491175374643595:2081] 1771489897783677 != 1771489897783680 2026-02-19T08:31:37.910917Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:37.911000Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:37.926857Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:37.980945Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:37.994558Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:37.994604Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:37.994615Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:37.994732Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:38.599372Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:38.608682Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-02-19T08:31:38.623143Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.700337Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.861312Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:38.928326Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.021838Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.700989Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491196849481949:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.701090Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.701434Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491196849481958:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.701484Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:42.785838Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491175374643627:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:42.785935Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:42.816094Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.855057Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.893229Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.950714Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.005387Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.075340Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.133122Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.207382Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.302157Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491201144450128:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.302276Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.302595Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491201144450134:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.302660Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491201144450133:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.302712Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.307964Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:43.321761Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491201144450137:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:43.420901Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491201144450188:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:45.880258Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.322472Z node 8 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489907355, txId: 281474976715675] shutting down >> TNodeQueryTextCacheTest::MultipleDistinctEntries [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest >> TNodeQueryTextCacheTest::AddAndGet [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest >> TNodeQueryTextCacheTest::IgnoresZeroSpanIdAndEmptyQueryText [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest >> TNodeQueryTextCacheTest::SameSpanIdDifferentTextUpdatesIndex [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest >> TNodeQueryTextCacheTest::EvictsOldestEntryWhenFull [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest >> TNodeQueryTextCacheTest::GetMissingReturnsEmpty [GOOD] >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest >> TNodeQueryTextCacheTest::MultipleDistinctEntries [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::PureExpr >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> KqpScan::SqlInLiteral [GOOD] >> KqpParams::RowsList [GOOD] >> KqpParams::ParameterTypes >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query [GOOD] >> TNodeQueryTextCacheTest::DeduplicatesConsecutiveIdenticalPairs [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/common/ut/unittest >> TNodeQueryTextCacheTest::DeduplicatesConsecutiveIdenticalPairs [GOOD] >> TBackupCollectionTests::HiddenByFeatureFlag >> KqpParams::MissingParameter [GOOD] >> KqpParams::MissingOptionalParameter+UseSink >> KqpQuery::Now [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink >> KqpReturning::Random [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink |95.2%| [TA] $(B)/ydb/core/kqp/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/kqp/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/scan/unittest >> KqpScan::SqlInLiteral [GOOD] Test command err: 2026-02-19T08:31:31.193890Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491148468116794:2164];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:31.194221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:31.431676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:31.465928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:31.466062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:31.538509Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:31.541428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:31.674457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:31.674480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:31.674488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:31.674579Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:31.705099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:32.102912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:32.151490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.201377Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:32.264505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.404467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:32.476058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.460600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491161353020406:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.460729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.461041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491161353020416:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.461081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:34.798639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.834168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.864808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.892607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.922829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:34.970034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.009776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.070106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:35.140998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491165647988580:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.141077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.141290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491165647988585:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.141329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491165647988586:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.141462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:35.144781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:35.156127Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491165647988589:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:35.232017Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491165647988642:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:36.190233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491148468116794:2164];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:36.190290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:36.673021Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489896708, txId: 281474976710673] shutting down 2026-02-19T08:31:36.776159Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489896813, txId: 281474976710675] shutting down 2026-02-19T08:31:36.893752Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489896932, txId: 281474976710677] s ... ing down 2026-02-19T08:31:44.509474Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491206454214208:2196];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:44.509760Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:44.532347Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:44.626752Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:44.628567Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:44.628634Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:44.629014Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491206454214041:2081] 1771489904501186 != 1771489904501189 2026-02-19T08:31:44.662323Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:44.758208Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:44.758227Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:44.758234Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:44.758315Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:44.815805Z node 3 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:45.342923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:45.350341Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:45.360479Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.462261Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.559338Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:45.613737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.701750Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.016117Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491223634085092:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.016212Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.016576Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491223634085101:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.016641Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.094161Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.134300Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.177745Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.208653Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.242614Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.279643Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.318572Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.363639Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.457923Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491223634085967:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.458054Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.458575Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491223634085972:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.458626Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491223634085973:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.458736Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.462053Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:48.472510Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491223634085976:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:48.574237Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491223634086027:3758] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:49.506359Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491206454214208:2196];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:49.506435Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:50.671962Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771489910708, txId: 281474976715674] shutting down |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal |95.2%| [TA] $(B)/ydb/core/kqp/ut/scan/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::CurrentUtcTimestamp |95.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/scan/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpAgg::AggHashShuffle-UseSink [GOOD] >> KqpAgg::AggWithSqlIn >> KqpExplain::FullOuterJoin [GOOD] >> KqpExplain::MergeConnection >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::ParallelCreate >> KqpExplain::UpdateConditional-UseSink [GOOD] >> KqpExplain::UpdateConditionalKey+UseSink >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> KqpExplain::UpdateSecondaryConditional+UseSink-UseStreamIndex [GOOD] >> KqpExplain::UpdateSecondaryConditional-UseSink+UseStreamIndex [GOOD] >> KqpExplain::UpdateSecondaryConditional+UseSink+UseStreamIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::Random [GOOD] Test command err: 2026-02-19T08:29:55.937577Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490736178998389:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:55.937636Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:56.154402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:56.154466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:56.183182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:56.201175Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:56.203436Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490736178998347:2081] 1771489795935584 != 1771489795935587 2026-02-19T08:29:56.216934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:56.290229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:56.290247Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:56.290253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:56.290312Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:56.352522Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:56.644809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:56.650485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, 
at schemeshard: 72057594046644480 2026-02-19T08:29:56.700393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:56.819300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:56.944470Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:56.946765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:57.014344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.637252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490749063902105:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.637369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.637759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490749063902115:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.637824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.984009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.010196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.036592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.064370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.090068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.120231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.177140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.218147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:59.284441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490753358870280:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.284526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.284669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490753358870285:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.284729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490753358870287:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.284771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:59.288390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:59.298562Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490753358870289:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:59.357788Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490753358870340:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:00.937576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490736178998389:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:00.937644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:13: Warning: At function: RemovePrefixMembers, At function: RemoveSystemMembers, At function: PersistableRepr, At function: SqlProject
:4:27: Wa ... 196798092131:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:42.802052Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:42.813875Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:42.934729Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:42.941897Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608491196798092081:2081] 1771489902756916 != 1771489902756919 2026-02-19T08:31:42.952980Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:42.953118Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:42.981607Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:43.034121Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:43.057850Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:43.057879Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:43.057890Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:43.058001Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:43.773898Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:43.791506Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:43.799029Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:43.812946Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.947009Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.135694Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.217522Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.765352Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491196798092131:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:47.765455Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:47.815324Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491218272930437:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.815510Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.818334Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491218272930448:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.818445Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.922919Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.976512Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.025832Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.075592Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.124990Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.170832Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.217995Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.281566Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.456961Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491222567898616:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.457098Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.457813Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491222567898621:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.457916Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491222567898622:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.458055Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.462292Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:48.476948Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491222567898625:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:48.568093Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491222567898676:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:50.913461Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UpdateWhereInSubquery >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::DropTwice |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn >> TNetClassifierTest::TestInitFromBadlyFormattedFile >> TNetClassifierTest::TestInitFromFile |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource >> TSchemeShardTest::MkRmDir >> TSchemeShardTest::CreateTable >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardTest::CreateTableWithUniformPartitioning >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns >> KqpLimits::BigParameter [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn >> TSchemeShardTest::Boot >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::JoinStatsBasicScan >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> TSchemeShardTest::InitRootAgain >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> KqpExplain::PureExpr [GOOD] >> KqpExplain::MultiUsedStage >> TSchemeShardTest::RmDirTwice >> KqpNewEngine::StaleRO_IndexFollowers-EnableFollowers [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink-UseStreamIndex [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink-UseStreamIndex >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SelfJoin >> KqpQuery::CreateAsSelect_DisableDataShard [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> 
KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DeleteWhereInSubquery >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCacheInvalidate >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> KqpParams::ParameterTypes [GOOD] >> KqpQuery::CreateAsSelectBadTypes+IsOlap >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpParams::MissingOptionalParameter+UseSink [GOOD] >> KqpParams::MissingOptionalParameter-UseSink >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> TBackupCollectionTests::DropEmptyBackupCollection >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TNetClassifierTest::TestInitFromFile [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> TBackupCollectionTests::DropEmptyBackupCollection [GOOD] >> TBackupCollectionTests::DropNonExistentCollection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2026-02-19T08:31:55.630063Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491251309929347:2143];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:55.634757Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:55.953555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:55.953685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:55.972763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:31:55.992601Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:55.993495Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491251309929240:2081] 1771489915621997 != 1771489915622000 2026-02-19T08:31:56.019374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:56.078172Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0041c5/r3tmp/yandexm1EKoA.tmp 2026-02-19T08:31:56.078260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0041c5/r3tmp/yandexm1EKoA.tmp 2026-02-19T08:31:56.082039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0041c5/r3tmp/yandexm1EKoA.tmp 2026-02-19T08:31:56.082153Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:56.244821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:31:56.642093Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2026-02-19T08:31:55.627799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491251097325023:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:55.628275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:55.916602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:31:55.953553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:55.953661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:56.007528Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:56.012667Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491251097324993:2081] 1771489915622195 != 1771489915622198 2026-02-19T08:31:56.022913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:56.078319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0041bc/r3tmp/yandexZokWUH.tmp 2026-02-19T08:31:56.078365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0041bc/r3tmp/yandexZokWUH.tmp 2026-02-19T08:31:56.081873Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:344: invalid NetData format 2026-02-19T08:31:56.081932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: /home/runner/.ya/build/build_root/ynks/0041bc/r3tmp/yandexZokWUH.tmp 2026-02-19T08:31:56.082063Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::DdlInDataQuery >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2026-02-19T08:31:55.644740Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491252441398253:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:55.646457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:55.937805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:31:55.988874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:55.988992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:56.022409Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:56.023586Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:32308) connection closed with error: Connection refused 2026-02-19T08:31:56.029777Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-02-19T08:31:56.035522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:56.084176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:56.084214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:56.084221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:56.084315Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:56.174023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:31:56.651057Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectWhereInSubquery >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::NoEvaluate >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTable >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> KqpExplain::MergeConnection [GOOD] >> 
KqpExplain::IdxFullscan >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::StreamWrite-Allowed |95.2%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TBackupCollectionTests::DropNonExistentCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithMultipleBackups >> KqpExplain::UpdateConditionalKey+UseSink [GOOD] >> KqpExplain::UpdateConditionalKey-UseSink |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CreateAlterTableWithCacheMode >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink >> KqpExplain::UpdateSecondaryConditional+UseSink+UseStreamIndex [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::DependentOps >> KqpQuery::UpdateWhereInSubquery [GOOD] >> KqpQuery::UpdateThenDelete+UseSink >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> TSchemeShardSecretTest::CreateSecretInSubdomain >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableConfig >> TSchemeShardSecretTest::EmptySecretName >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TSchemeShardSecretTest::DropSecret >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TBackupCollectionTests::DropCollectionWithMultipleBackups [GOOD] >> TBackupCollectionTests::DropCollectionWithNestedTables >> TSchemeShardTest::CreateAlterTableWithCacheMode [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::ParallelCreateSameTable >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::SchemeErrors >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> KqpStats::RequestUnitForSuccessExplicitPrepare 
[GOOD] >> KqpStats::RequestUnitForExecute >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWrite >> TSchemeShardSecretTest::EmptySecretName [GOOD] >> TSchemeShardSecretTest::DropUnexistingSecret >> TSchemeShardSecretTest::CreateSecretInSubdomain [GOOD] >> TSchemeShardSecretTest::CreateSecretInheritPermissions >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableFollowers >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> TSchemeShardSecretTest::DropSecret [GOOD] >> TSchemeShardSecretTest::DropNotASecret >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::ReadTableRanges >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> KqpNamedExpressions::NamedExpressionRandomUpsertIndex-UseSink+UseDataQuery [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertIndex+UseSink+UseDataQuery >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::EmptyListForListParameterExecuteDataQuery >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] >> TBackupCollectionTests::DropCollectionWithNestedTables [GOOD] >> TBackupCollectionTests::DropLargeBackupCollection >> KqpParams::MissingOptionalParameter-UseSink [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable >> KqpStats::SelfJoin [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardSecretTest::AlterUnexistingSecret [GOOD] >> TSchemeShardSecretTest::AlterNotASecret >> TSchemeShardSecretTest::DropNotASecret [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:02.909091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:02.909193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-02-19T08:32:02.909281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:02.909332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:02.909365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:02.909405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:02.909430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:02.909474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:02.909550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:02.909617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:02.910503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:02.912004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:03.005421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:03.005481Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:03.027061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:03.032075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:03.032263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:03.046956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:03.047705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:03.048513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:03.048801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:03.056879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:03.057040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2026-02-19T08:32:03.064637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:03.064725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:03.064855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:03.064908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:03.065004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:03.065202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:03.289239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.293973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.294039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.294103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.294187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... rivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:05.167663Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 161us result status StatusSuccess 2026-02-19T08:32:05.167919Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:05.168314Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:05.168439Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 138us result status StatusSuccess 2026-02-19T08:32:05.168797Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { 
Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:05.169219Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:05.169321Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 117us result status StatusSuccess 2026-02-19T08:32:05.169622Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: 
"dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:05.170084Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:05.170185Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 118us result status StatusSuccess 2026-02-19T08:32:05.170555Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:02.909095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:02.909215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:02.909293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:02.909334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:02.909387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 
2026-02-19T08:32:02.909441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:02.909470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:02.909521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:02.909643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:02.909739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:02.910592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:02.912017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:03.022094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:03.022167Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:03.034950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:03.037675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:03.037952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:03.050797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:03.052257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:03.052980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:03.053288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:03.057668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:03.057856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:03.064621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:03.064703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:03.064818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:03.064871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:03.064994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:03.065207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:03.253466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.254408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.254531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.254622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.254685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.254768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.254832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.254891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.254982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.255074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.255139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.255199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.255280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.255368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
107682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:32:05.107797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:05.109100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:32:05.109227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:32:05.109484Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:05.109562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936751 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:05.109590Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:05.109793Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:32:05.109855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:05.110013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:05.110087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:05.111653Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:05.111689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:05.111829Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:05.111858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:32:05.112091Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:05.112126Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:32:05.112195Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:05.112218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:05.112245Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:05.112268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:05.112293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:32:05.112324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:05.112357Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:32:05.112384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:32:05.112439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:05.112474Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:32:05.112509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:32:05.112970Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:05.113075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:05.113113Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:32:05.113150Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:32:05.113194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:05.113280Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:32:05.115617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:32:05.115942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:05.116222Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:678:2666] Bootstrap 2026-02-19T08:32:05.116999Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:678:2666] Become StateWork (SchemeCache [2:683:2671]) 2026-02-19T08:32:05.117227Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:05.117356Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/test-secret" took 136us result status StatusPathDoesNotExist 2026-02-19T08:32:05.117469Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-02-19T08:32:05.117851Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:678:2666] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:32:05.119651Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResults wait txId: 101 2026-02-19T08:32:05.121670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "test-secret" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:05.121824Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 101:0, path: /MyRoot/test-secret 2026-02-19T08:32:05.121940Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-02-19T08:32:05.123814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at 
schemeshard: 72057594046678944 2026-02-19T08:32:05.124002Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP SECRET, path: /MyRoot/test-secret TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:32:05.124265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:32:05.124309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:32:05.124657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:32:05.124738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:05.124770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:694:2682] TestWaitNotification: OK eventTxId 101 >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableById >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:02.909890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:02.909974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:02.910039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:02.910078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:02.910134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:02.910175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:02.910218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:02.910248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:02.910327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:02.910391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:02.911189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:02.912060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:03.009614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:03.009685Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:03.022286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:03.022890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:03.024373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:03.037726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:03.038562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:03.040811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:03.046338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:03.055649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:03.056946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:03.064627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:03.064709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:03.064824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:03.064870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 
1] 2026-02-19T08:32:03.064979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:03.065190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:03.259989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.260915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:32:03.261486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.261998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
hard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:05.857284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:32:05.857382Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:05.857419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:32:05.857459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2210], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:32:05.857873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:05.857933Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:05.858045Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:05.858086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:05.858131Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:05.858174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:05.858215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:32:05.858260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:05.858302Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:05.858347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:05.858429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:32:05.858474Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:32:05.858510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:32:05.858540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:32:05.859104Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 
1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:05.859221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:05.859265Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:05.859311Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:32:05.859356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:05.860066Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:05.860138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:05.860169Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:05.860199Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:32:05.860229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:32:05.860301Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:32:05.862944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:05.864035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:32:05.864270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:32:05.864322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:32:05.864695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:32:05.864800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:05.864843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: 
tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:712:2700] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-02-19T08:32:05.867146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "dir" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:05.867313Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 102:0, path: /MyRoot/dir 2026-02-19T08:32:05.867455Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:32:05.870073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:05.870362Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: DROP SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:32:05.870640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:32:05.870684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:32:05.871050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:32:05.871146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:05.871188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:719:2707] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:05.871621Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:05.871778Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 179us result status StatusSuccess 2026-02-19T08:32:05.872142Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD] Test command err: 2026-02-19T08:31:37.617059Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491175321169532:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:37.617131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:37.821791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:37.826609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:37.826727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:37.905587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:37.941395Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:37.942732Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491175321169501:2081] 1771489897615732 != 1771489897615735 2026-02-19T08:31:38.005970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:38.005995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:38.006000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:38.006090Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:38.098326Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:38.381639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:38.387787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:38.434423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.571976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.668737Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:38.709685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.765812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.731942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491188206073261:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.732064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.732970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491188206073271:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.733052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.113930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.186128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.237274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.274808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.302171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.340044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.416303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.468045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.545832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491192501041446:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.545930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.546334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491192501041451:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.546378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491192501041452:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.546476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.550362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:41.562560Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491192501041455:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:41.642268Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491192501041506:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:42.617314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491175321169532:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:42.617374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFu ... ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"WaitOutputTimeUs":{"Count":2,"Sum":75,"Max":42,"Min":33,"History":[5,75]},"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":48,"Max":24,"Min":24}}],"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"FinishedTasks":2,"Introspections":["2 tasks default for source scan"],"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"DurationUs":{"Count":2,"Sum":2000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[5,2097152]},"BaseTimeMs":1771489924688,"Output":[{"Pop":{},"Name":"4","Push":{}},{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":7,"Max":4,"Min":3},"ActiveMessageMs":{"Count":2,"Max":4,"Min":3},"FirstMessageMs":{"Count":2,"Sum":7,"Max":4,"Min":3},"Bytes":{"Count":2,"Sum":50,"Max":37,"Min":13,"History":[5,50]}},"LocalBytes":50,"Name":"RESULT","Push":{"LastMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"FirstMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Bytes":{"Count":2,"Sum":50,"Max":37,"Min":13,"History":[5,50]},"PauseMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"WaitTimeUs":{"Count":2,"Sum":3241,"Max":1795,"Min":1446,"History":[5,3241]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"CpuTimeUs":{"Count":2,"Sum":854,"Max":605,"Min":249,"History":[5,854]},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"FirstMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[5,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"FirstMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[5,96]},"PauseMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"WaitTimeUs":{"Count":2,"Su
m":4248,"Max":2412,"Min":1836,"History":[5,4248]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"WaitMessageMs":{"Count":2,"Max":3,"Min":2}}}],"StageDurationUs":1000,"ResultRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResultBytes":{"Count":2,"Sum":50,"Max":37,"Min":13},"OutputBytes":{"Count":2,"Sum":50,"Max":37,"Min":13},"UpdateTimeMs":4,"Tasks":2}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Collect","Stats":{"WaitOutputTimeUs":{"Count":2,"Sum":2621,"Max":1437,"Min":1184,"History":[6,2621]},"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":24,"Max":12,"Min":12}}],"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":1,"FinishedTasks":2,"InputBytes":{"Count":2,"Sum":50,"Max":37,"Min":13},"Introspections":["2 tasks same as previous stage"],"DurationUs":{"Count":2,"Sum":6000,"Max":3000,"Min":3000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[6,2097152]},"BaseTimeMs":1771489924688,"Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":10,"Max":5,"Min":5},"FirstMessageMs":{"Count":2,"Sum":10,"Max":5,"Min":5},"Bytes":{"Count":2,"Sum":104,"Max":64,"Min":40,"History":[6,104]}},"LocalBytes":104,"Name":"6","Push":{"LastMessageMs":{"Count":2,"Sum":10,"Max":5,"Min":5},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":10,"Max":5,"Min":5},"FirstMessageMs":{"Count":2,"Sum":10,"Max":5,"Min":5},"Bytes":{"Count":2,"Sum":104,"Max":64,"Min":40,"History":[6,104]},"PauseMessageMs":{"Count":2,"Sum":8,"Max":5,"Min":3},"WaitTimeUs":{"Count":2,"Sum":5019,"Max":2635,"Min":2384,"History":[6,5019]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"WaitMessageMs":{"Count":2,"Max":5,"Min":3}}}],"CpuTimeUs":{"Count":2,"Sum":672,"Max":447,"Min":225,"History":[6,672]},"StageDurationUs":3000,"WaitInputTimeUs":{"Count":2,"Sum":3228,"Max":1757,"Min":1471,"History":[6,3228]},"OutputBytes":{"Count":2,"Sum":104,"Max":64,"Min":40},"Input":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":7,"Max":4,"Min":3},"ActiveMessageMs":{"Count":2,"Max":4,"Min":3},"FirstMessageMs":{"Count":2,"Sum":7,"Max":4,"Min":3},"Bytes":{"Count":2,"Sum":50,"Max":37,"Min":13,"History":[6,50]}},"LocalBytes":50,"Name":"2","Push":{"LastMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"FirstMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"Bytes":{"Count":2,"Sum":50,"Max":37,"Min":13,"History":[6,50]},"PauseMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"WaitTimeUs":{"Count":2,"Sum":3241,"Max":1795,"Min":1446,"History":[6,3241]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"UpdateTimeMs":6,"InputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Tasks":2}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"InnerJoin (Lookup)"}],"Node Type":"Limit-InnerJoin 
(Lookup)","Stats":{"WaitOutputTimeUs":{"Count":1,"Sum":26,"Max":26,"Min":26,"History":[6,26]},"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":104,"Max":104,"Min":104},"Introspections":["1 minimum tasks for compute"],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[6,1048576]},"BaseTimeMs":1771489924688,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":29,"Max":29,"Min":29,"History":[6,29]}},"LocalBytes":29,"Name":"8","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":29,"Max":29,"Min":29,"History":[6,29]},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":2557,"Max":2557,"Min":2557,"History":[6,2557]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":5,"Min":3}}}],"CpuTimeUs":{"Count":1,"Sum":734,"Max":734,"Min":734,"History":[6,734]},"StageDurationUs":0,"WaitInputTimeUs":{"Count":1,"Sum":35,"Max":35,"Min":35,"History":[6,35]},"OutputBytes":{"Count":1,"Sum":29,"Max":29,"Min":29},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":104,"Max":104,"Min":104,"History":[6,104]}},"LocalBytes":104,"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":104,"Max":104,"Min":104,"History":[6,104]},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":2509,"Max":2509,"Min":2509,"History":[6,2509]},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":5,"Min":3}}}],"UpdateTimeMs":5,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":29,"Max":29,"Min":29},"Introspections":["1 minimum tasks for 
compute"],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[6,1048576]},"BaseTimeMs":1771489924688,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":29,"Max":29,"Min":29,"History":[6,29]}},"LocalBytes":29,"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":29,"Max":29,"Min":29,"History":[6,29]},"WaitTimeUs":{"Count":1,"Sum":5086,"Max":5086,"Min":5086,"History":[6,5086]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"CpuTimeUs":{"Count":1,"Sum":595,"Max":595,"Min":595,"History":[6,595]},"StageDurationUs":0,"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResultBytes":{"Count":1,"Sum":29,"Max":29,"Min":29},"OutputBytes":{"Count":1,"Sum":29,"Max":29,"Min":29},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":29,"Max":29,"Min":29,"History":[6,29]}},"LocalBytes":29,"Name":"6","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":29,"Max":29,"Min":29,"History":[6,29]},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":2557,"Max":2557,"Min":2557,"History":[6,2557]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":5,"Min":3}}}],"UpdateTimeMs":6,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":304979,"CpuTimeUs":298923},"ProcessCpuTimeUs":442,"TotalDurationUs":336963,"ResourcePoolId":"default","QueuedTimeUs":3649},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":10,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"Operators":[{"E-Rows":"0","Columns":["Key"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"Lookup","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"Name":"InnerJoin (Lookup)"}],"Node Type":"InnerJoin (Lookup)"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.734,"A-Cpu":0.734,"A-Size":29,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.595,"A-Cpu":1.329,"A-Size":29,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink-UseStreamIndex [GOOD] >> 
KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink+UseStreamIndex [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink+UseStreamIndex >> KqpStats::JoinStatsBasicScan [GOOD] >> KqpStats::DeferredEffects+UseSink >> KqpQuery::CreateAsSelectBadTypes-IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex >> KqpQuery::DdlInDataQuery [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin [GOOD] >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin >> TSchemeShardSecretTest::AlterNotASecret [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query >> KqpQuery::QueryStats-UseSink [GOOD] >> KqpQuery::QueryFromSqs >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes >> TSchemeShardSecretTest::CreateSecretOverExistingObject >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AlterNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:02.910047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:02.910134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:02.910198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:02.910236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:02.910266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:02.910324Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:02.910359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:02.910397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:02.910474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:02.910535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:02.911406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:02.912010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:03.005498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:03.005567Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:03.025945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:03.031794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:03.031991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:03.047369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:03.048058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:03.048818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:03.049126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:03.056274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:03.056949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:03.064641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:03.064714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:03.064822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:03.064876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:03.065017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:03.065204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:03.245664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.247862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.248922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:03.249013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
hemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:07.447752Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:32:07.447907Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:07.447950Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:32:07.448006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2210], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:32:07.448304Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:07.448355Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:07.448465Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:07.448509Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:07.448552Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:07.448630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:07.448684Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-02-19T08:32:07.448737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:07.448776Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:07.448813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:07.448937Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:32:07.448986Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-02-19T08:32:07.449036Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-02-19T08:32:07.449088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-02-19T08:32:07.449999Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, 
cookie: 101 2026-02-19T08:32:07.450120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:07.450163Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:07.450237Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:32:07.450312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:07.451065Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:07.451143Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:07.451174Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:07.451205Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:32:07.451250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:32:07.451329Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:32:07.454620Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:07.455111Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:32:07.455408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:32:07.455483Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:32:07.455925Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:32:07.456023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:07.456069Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: 
satisfy waiter [3:710:2698] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-02-19T08:32:07.458864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSecret AlterSecret { Name: "dir" Value: "" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:07.459048Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_secret.cpp:113: [72057594046678944] TAlterSecret Propose, path: /MyRoot/dir, opId: 102:0 2026-02-19T08:32:07.459220Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:32:07.461559Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-02-19T08:32:07.461829Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: ALTER SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:32:07.462132Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:32:07.462195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:32:07.462629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:32:07.462722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:07.462762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:717:2705] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:07.463244Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:07.463440Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 224us result status StatusSuccess 2026-02-19T08:32:07.463828Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query >> KqpExplain::IdxFullscan [GOOD] >> KqpExplain::MultiJoinCteLinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] Test command err: 2026-02-19T08:31:37.834516Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491175296962668:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:37.836094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:38.091652Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:38.107474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:38.107633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:38.152010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:38.208720Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:38.211401Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491175296962618:2081] 1771489897829931 != 1771489897829934 2026-02-19T08:31:38.267515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:38.267551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:38.267562Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:38.267694Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:38.336290Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:38.691737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:38.753116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:38.843436Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:38.924941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.082276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:39.145920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:40.923865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491188181866381:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.924030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.924624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491188181866391:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:40.924685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.267382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.306455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.340273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.371990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.405690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.450723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.501794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.551704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.641636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491192476834557:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.641727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.642155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491192476834563:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.642196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491192476834562:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.642293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:41.646637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:41.667823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:31:41.668010Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491192476834566:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:41.729910Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491192476834617:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:42.837811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491175296962668:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:42.839063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:44.259229Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491203284498952:2073];send_to=[0:7307199536658146131:7762515 ... Id: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.232958Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037969;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.233521Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.233997Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037971;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.234601Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.234975Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.235439Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037973;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.235838Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.236203Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.236730Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.237004Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.237678Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.237849Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.238729Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } 
Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.238857Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.239639Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.239895Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037995;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.240521Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.240892Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.241443Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.241987Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.242313Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: 
"Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.242816Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.243056Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.243468Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-02-19T08:32:05.243993Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:302;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; >> KqpExplain::UpdateConditionalKey-UseSink [GOOD] >> KqpExplain::UpdateOn+UseSink >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardSecretTest::DefaultDescribeSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DdlInDataQuery [GOOD] Test command err: 2026-02-19T08:31:40.509408Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491187057735952:2141];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.509814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:40.731178Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:40.752655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:40.752726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:40.809343Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:40.817732Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:40.836307Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491187057735849:2081] 1771489900506327 != 1771489900506330 2026-02-19T08:31:40.902566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:40.902588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:40.902594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:40.902689Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.023708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:41.393517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:41.409866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:41.522723Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:43.445436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491199942638644:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.445506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491199942638619:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.445633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.445979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491199942638647:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.446076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.449293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:43.458567Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491199942638646:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:31:43.582344Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491199942638701:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:43.834071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.113563Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=ZTQ0ZjEwNzAtOTVlODMxYTEtNWNiOGNiMjgtZTA1YzBjNDY=, ActorId: [1:7608491199942638811:2345], ActorState: ExecuteState, LegacyTraceId: 01khtgdhb117n34fcfamz63s30, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1388: Invalid Decimal value for precision:  status# BAD_REQUEST issues# trace_id# 2026-02-19T08:31:44.835962Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491204926649260:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:44.836034Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:44.875678Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.019649Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.019718Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.022116Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.029076Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491204926649218:2081] 1771489904834712 != 1771489904834715 2026-02-19T08:31:45.039829Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.074967Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:45.138268Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.138296Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.138302Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.138373Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-02-19T08:31:45.691194Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:45.698527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:45.843051Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:47.847711Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491217811551992:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.847797Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491217811551979:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.847879Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.848291Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491217811552010:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.848357Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:47.851017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:47.859048Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491217811552009:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:31:47.947680Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491217811552062:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, ... : schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:00.888211Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:01.046847Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:01.110175Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:01.239649Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:03.994136Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491287683864541:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:03.994270Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:03.994623Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491287683864551:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:03.994707Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:04.087870Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.125338Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.172236Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.212616Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.252568Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.308644Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.357777Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.414372Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.533004Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491291978832718:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:04.533107Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:04.533183Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491291978832723:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:04.533596Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491291978832725:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:04.533646Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:04.538816Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:04.551556Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491291978832726:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:04.644870Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491291978832778:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:05.174981Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491274798960818:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:05.175054Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:06.592979Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608491300568767699:2543], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2026-02-19T08:32:06.594200Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=YjZmNDgxNzItZTI4ZWY3ZTUtYTZmNzVhMzItMjM3NmJmMjI=, ActorId: [5:7608491300568767691:2538], ActorState: ExecuteState, LegacyTraceId: 01khtge7dbfehbcn55wcwgcbcq, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 5 column: 30 } message: "Operation \'CreateTable\' can\'t be performed in data query" end_position { row: 5 column: 30 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2026-02-19T08:32:06.627782Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608491300568767712:2546], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2026-02-19T08:32:06.629120Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=YjZmNDgxNzItZTI4ZWY3ZTUtYTZmNzVhMzItMjM3NmJmMjI=, ActorId: [5:7608491300568767691:2538], ActorState: ExecuteState, LegacyTraceId: 01khtge7ef4936qetvee47fzqk, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 24 } message: "Operation \'DropTable\' can\'t be performed in data query" end_position { row: 2 column: 24 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2026-02-19T08:32:06.669824Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [5:7608491300568767721:2550], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2026-02-19T08:32:06.672151Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=5&id=YjZmNDgxNzItZTI4ZWY3ZTUtYTZmNzVhMzItMjM3NmJmMjI=, ActorId: [5:7608491300568767691:2538], ActorState: ExecuteState, LegacyTraceId: 01khtge7ft2cjd508vscs05685, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 54 } message: "Operation \'AlterTable\' can\'t be performed in data query" end_position { row: 2 column: 54 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 >> TSchemeShardSecretTest::CreateNotInDatabase >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> KqpQuery::UpdateThenDelete+UseSink [GOOD] >> KqpQuery::UpdateThenDelete-UseSink >> KqpExplain::FewEffects-UseSink [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> TSchemeShardSecretTest::CreateSecretOverExistingObject [GOOD] >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes [GOOD] >> TSchemeShardSecretTest::ReadOnlyMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] Test command err: 2026-02-19T08:31:45.013928Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491210399592209:2253];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.014160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:45.245781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.278699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.278829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.366394Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491206104624673:2081] 1771489904998180 != 1771489904998183 2026-02-19T08:31:45.388736Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.392888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.511828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:45.517893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.517923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.517939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.518068Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:45.920306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:45.988519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.016560Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:46.112223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.257586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.340090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.151801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491223284495738:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.151939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.152378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491223284495748:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.152520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.498937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.534909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.565938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.597699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.629364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.666930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.713173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.772296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.856849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491223284496615:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.856952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.857424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491223284496621:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.857431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491223284496620:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.857474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.861161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:48.871777Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491223284496624:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:48.931121Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491223284496675:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:50.013852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491210399592209:2253];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:50.013955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:50.449939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Warning: Optimization, ... 2-19T08:32:07.516211Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.516219Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.522116Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.522173Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.522187Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.522326Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.522372Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.522381Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.528565Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.528635Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.528652Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.531398Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.531454Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.531467Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.535449Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.535488Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.535501Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.537818Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.537877Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.537890Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.541057Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.541102Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.541115Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.545070Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.545133Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.545148Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.546338Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.546386Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.546399Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.552501Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.552536Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.552535Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.552544Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.552574Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.552588Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.559599Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.559604Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.559648Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.559650Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.559660Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.559681Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:32:07.634139Z node 4 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491305078672129:2794], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:07.634238Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:07.634614Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491305078672131:2795], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:07.634690Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:07.649634Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491305078672140:3869] txid# 281474976710660, issues: { message: "Type \'Datetime64\' specified for column \'Datetime\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } >> TBackupCollectionTests::DropLargeBackupCollection [GOOD] >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpQuery::CreateAsSelectView >> TSchemeShardSecretTest::DefaultDescribeSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingSecret >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] >> TSchemeShardSecretTest::CreateNotInDatabase [GOOD] >> TSchemeShardSecretTest::AsyncDropSameSecret >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:08.772947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:08.773045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:08.773114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:08.773160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 
100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:08.773198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:08.773234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:08.773265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:08.773298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:08.773383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:08.773457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:08.774389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:08.774708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:08.889567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:08.889641Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:08.902415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:08.903128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:08.903317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:08.913649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:08.914340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:08.915013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:08.915275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:08.917959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:08.918110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:08.919152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:08.919206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:08.919290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-02-19T08:32:08.919329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:08.919431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:08.919631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:09.079371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.080435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.080577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.080689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.080786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.080886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.080963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.081051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.081157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.081235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.081306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.081378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.081476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.081565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... ration: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:32:10.942226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:32:10.942257Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:32:10.942285Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 6 2026-02-19T08:32:10.942324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:32:10.945437Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:32:10.945523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-02-19T08:32:10.945555Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-02-19T08:32:10.945582Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 5 2026-02-19T08:32:10.945632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-02-19T08:32:10.945729Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-02-19T08:32:10.948192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:32:10.948304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-02-19T08:32:10.949541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-02-19T08:32:10.949768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-02-19T08:32:10.949815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-02-19T08:32:10.950215Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-02-19T08:32:10.950302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:32:10.950338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:848:2837] TestWaitNotification: OK eventTxId 105 2026-02-19T08:32:10.950819Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:10.950994Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 205us result status StatusSuccess 2026-02-19T08:32:10.951301Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:10.951764Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:10.951883Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 
72057594046678944 describe path "/MyRoot/dir/subdir/secret" took 136us result status StatusSuccess 2026-02-19T08:32:10.952141Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir/secret" PathDescription { Self { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:10.952546Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:10.952687Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir" took 181us result status StatusSuccess 2026-02-19T08:32:10.953015Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir" PathDescription { Self { Name: "subdir" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\004\032\005user2 \003(\001" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { 
Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpStats::RequestUnitForExecute [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTablesForRestore >> TSchemeShardSecretTest::CreateSecret >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets [GOOD] >> TSchemeShardSecretTest::AsyncCreateSameSecret >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:08.838847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:08.838916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:08.838964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:08.838994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:08.839021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: 
OperationsProcessing config: using default configuration 2026-02-19T08:32:08.839050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:08.839072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:08.839093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:08.839417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:08.839469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:08.840120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:08.840384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:08.925132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:08.925207Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:08.938396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:08.939273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:08.939486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:08.955721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:08.958241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:08.959016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:08.959384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:08.962894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:08.963077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:08.964147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:08.964206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:08.964324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:08.964365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:08.964462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:08.964651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:09.108537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.109504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.109636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.109753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.109914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.109991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.110072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.110147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.110253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.110325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.110398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.110486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.110569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:09.110643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
e 2026-02-19T08:32:11.617436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-02-19T08:32:11.617588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2026-02-19T08:32:11.619507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:11.619799Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:11.619929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:11.619978Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_secret.cpp:66: [72057594046678944] TCreateSecret::TPropose, opId: 103:0HandleReply TEvOperationPlan: step# 5000039 2026-02-19T08:32:11.620125Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 Propose -> Done 2026-02-19T08:32:11.620288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:32:11.620349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 FAKE_COORDINATOR: Erasing txId 103 2026-02-19T08:32:11.622230Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:11.622269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:32:11.622389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:32:11.622497Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:11.622531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:845:2800], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:32:11.622568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:845:2800], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:32:11.622821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:11.622863Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:32:11.622962Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:11.622998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:11.623035Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:11.623078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:11.623115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:32:11.623158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:11.623194Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:32:11.623226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:32:11.623287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:32:11.623328Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-02-19T08:32:11.623376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-02-19T08:32:11.623406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-02-19T08:32:11.623957Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:11.624031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:11.624064Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:11.624109Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:32:11.624152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:32:11.624786Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:11.624854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:11.624883Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:11.624911Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-02-19T08:32:11.624941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:32:11.625012Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:32:11.628052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:11.628138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:32:11.628357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:32:11.628403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:32:11.628788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:32:11.628872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:32:11.628912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:878:2831] TestWaitNotification: OK eventTxId 103 2026-02-19T08:32:11.629352Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-name" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:11.629512Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-name" took 192us result status StatusSuccess 2026-02-19T08:32:11.629816Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-name" PathDescription { Self { Name: "test-name" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: 
"\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-name" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:09.727673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:09.727752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:09.727810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:09.727850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:09.727925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:09.727957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:09.727983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2026-02-19T08:32:09.728010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:09.728082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:09.728145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:09.728939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:09.729254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:09.823170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:09.823229Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:09.839547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:09.840369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:09.840573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:09.860836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:09.862262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:09.862940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:09.863251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:09.866359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:09.866518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:09.867637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:09.867700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:09.867819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:09.867863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:09.867954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-02-19T08:32:09.868140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:10.019988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.021932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.022005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.022072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.022141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.022229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
02-19T08:32:12.004330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-02-19T08:32:12.004363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-02-19T08:32:12.005070Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:12.005156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:12.005213Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:12.005259Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:32:12.005313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:32:12.005991Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:12.006070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:12.006101Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:12.006132Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-02-19T08:32:12.006165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:32:12.006240Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:32:12.009270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:12.009757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:32:12.009958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send 
EvNotifyTxCompletion 2026-02-19T08:32:12.010005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:32:12.010383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:32:12.010477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:12.010538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:737:2726] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:12.010979Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:12.011162Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 215us result status StatusSuccess 2026-02-19T08:32:12.011503Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-02-19T08:32:12.014132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/dir" OperationType: ESchemeOpCreateSecret CreateSecret { Name: "test-secret" Value: "test-value-new" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:12.014348Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_create_secret.cpp:152: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0 2026-02-19T08:32:12.014418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_secret.cpp:160: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0, secretDescription (without secret parts): Name: "test-secret" 2026-02-19T08:32:12.014561Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:32:12.018820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-02-19T08:32:12.019102Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), operation: CREATE SECRET, path: /MyRoot/dir/test-secret TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-02-19T08:32:12.019450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:32:12.019494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-02-19T08:32:12.019906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:32:12.020002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:32:12.020046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:745:2734] TestWaitNotification: OK eventTxId 103 2026-02-19T08:32:12.020514Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:12.020687Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 195us result status StatusSuccess 2026-02-19T08:32:12.021011Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" 
PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-init" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates >> KqpQuery::DictJoin [GOOD] >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:09.911511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:09.911603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:09.911663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:09.911711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:09.911740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:09.911769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:09.911794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:09.911834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:09.911905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:09.911973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:09.912630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:09.912868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:09.997682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:09.997782Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:10.015652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:10.022289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:10.022524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:10.035323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:10.036121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:10.036977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:10.037281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:10.040466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:10.040637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:10.041832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:10.041894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:10.041999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:10.042047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:10.042149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:10.042366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:10.202498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.203522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.203661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.203759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.203884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.203957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.204039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.204110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.204218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.204294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.204379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.204462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.204540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:10.204639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
4: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:12.033946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-02-19T08:32:12.033987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-02-19T08:32:12.034006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:32:12.034291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:12.034330Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:32:12.034412Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:12.034439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:12.034487Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:12.034518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:12.034564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:32:12.034599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:12.034627Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:32:12.034661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:32:12.034737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:32:12.034773Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-02-19T08:32:12.034800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-02-19T08:32:12.034834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-02-19T08:32:12.034851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-02-19T08:32:12.035586Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:12.035677Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:12.035731Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:12.035766Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-02-19T08:32:12.035807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:12.036581Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:12.036654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:12.036697Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:12.036728Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:32:12.036758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:32:12.037773Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:12.037851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:12.037884Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:12.037922Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-02-19T08:32:12.037951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:32:12.038021Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:32:12.038529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-02-19T08:32:12.038582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-02-19T08:32:12.038673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:32:12.040980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:12.042279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:12.042390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:12.043910Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 104, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 105 2026-02-19T08:32:12.044369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-02-19T08:32:12.044431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 TestWaitNotification wait txId: 106 2026-02-19T08:32:12.044522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-02-19T08:32:12.044546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-02-19T08:32:12.044963Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-02-19T08:32:12.045066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-02-19T08:32:12.045104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:772:2761] 2026-02-19T08:32:12.045338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-02-19T08:32:12.045398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:32:12.045423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:772:2761] TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 2026-02-19T08:32:12.046016Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:12.046208Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 245us result status StatusPathDoesNotExist 2026-02-19T08:32:12.046407Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/dir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/dir/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/dir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::InvalidJson >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TSchemeShardSecretTest::AsyncCreateSameSecret [GOOD] >> TSchemeShardSecretTest::AsyncAlterSameSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: 2026-02-19T08:31:45.002272Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491209493137117:2194];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.002319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:45.317886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.417624Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491205198169656:2081] 1771489904988903 != 1771489904988906 2026-02-19T08:31:45.434208Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.441442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.441547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.476343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.554396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.554415Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.554422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.554496Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:45.614811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:45.972566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:45.981435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:46.022290Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:46.040714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.209752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.373128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.446815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.263861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222378040724:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.264016Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.264346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222378040734:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.264395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.637065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.668212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.718120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.749593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.783623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.819835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.859466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.927787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.016248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226673008901:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.016318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.016549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226673008906:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.016572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226673008907:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.016655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.020070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:49.030888Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491226673008910:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:49.086239Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491226673008961:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:50.001953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491209493137117:2194];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:50.002009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:50.572219Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491230967976558:2541], status: GENERIC_ERROR, issues:
:2:12: Error: mismatched input 'INC ... e 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:04.125737Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491292221603721:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:04.125797Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:04.152394Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:04.226691Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:04.228562Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608491292221603690:2081] 1771489924124878 != 1771489924124881 2026-02-19T08:32:04.258724Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:04.258817Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:04.269799Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:04.337787Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:04.358124Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:04.358151Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:04.358159Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:04.358242Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:04.835033Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:04.839931Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:04.852104Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:04.906228Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:05.125408Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:05.166024Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:05.194694Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.089695Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491309401474743:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.089806Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.090150Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491309401474752:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.090199Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.182621Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.217661Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.291321Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.329589Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.369507Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.420409Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.462592Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.522546Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.627128Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491309401475629:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.627236Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.627298Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491309401475634:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.628500Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491309401475637:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.628575Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.631498Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:08.642685Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491309401475636:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:08.721327Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491309401475689:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:09.127448Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491292221603721:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:09.127531Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 276 Consumed units: 6 >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> OperationMapping::IndexBuildRejected [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> TSchemeShardTest::CreateIndexedTablesForRestore [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyIndexedTablesForBackup |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> TSchemeShardSecretTest::CreateSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs >> KqpParams::EmptyListForListParameterExecuteDataQuery [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::Pure >> OperationMapping::IndexBuildCanceled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: 2026-02-19T08:31:45.100629Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491211267891796:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.100746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:45.206434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:45.525238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.525348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.531671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.565898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.619790Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not 
loaded 2026-02-19T08:31:45.753509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.753535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.753542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.753613Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:45.831646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:46.106627Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:46.247396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:46.253687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:48.283837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491224152794418:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.283993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.284444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491224152794453:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.285323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491224152794458:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.285396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.285985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491224152794473:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.286062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.288168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:48.305868Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491224152794455:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:31:48.434757Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491224152794510:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:48.626370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.619196Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491228245905821:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:49.619265Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:49.652427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:49.755492Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:49.757153Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491228245905790:2081] 1771489909617911 != 1771489909617914 2026-02-19T08:31:49.785165Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:49.785249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:49.795907Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:49.850382Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:49.850405Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:49.850417Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:49.850511Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:49.925833Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:50.198775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:50.637477Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:52.860285Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491241130808542:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.860354Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491241130808568:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.860404Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.862621Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491241130808580:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.862730Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.864578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:52.874823Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491241130808579:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:31:52.958565Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491241130808632:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathI ... 05.614023Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608491293681299537:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:05.629328Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:05.632044Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:05.751573Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:05.761907Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491293681299505:2081] 1771489925602583 != 1771489925602586 2026-02-19T08:32:05.775595Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:05.778759Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:05.778835Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:05.787064Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:05.886272Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:05.886301Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:05.886308Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:05.886386Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:06.041798Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:06.414959Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:06.421625Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:06.434963Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:06.545615Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:06.664451Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:06.712184Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:06.786372Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.083467Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491310861170564:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.083557Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.083781Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491310861170573:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.083820Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.177359Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.255087Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.290798Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.331466Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.378812Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.439127Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.485242Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.542162Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.620446Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491310861171443:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.620539Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.620836Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491310861171448:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.620887Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491310861171449:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.620995Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:09.624342Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:09.640422Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491310861171452:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:32:09.741089Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491310861171505:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:10.609030Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491293681299537:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:10.609091Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] Test command err: 2026-02-19T08:31:45.030804Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491207810223783:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.030860Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:45.326814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.357838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.357968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.446502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.450143Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.451664Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491203515256263:2081] 1771489905020067 != 1771489905020070 2026-02-19T08:31:45.516862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:45.586329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.586347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.586352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.586444Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:46.025748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:46.031007Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:46.032699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:46.100756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.248632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.406597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.477296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.324754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491220695127323:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.324892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.330306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491220695127333:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.330408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.655792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.693659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.725405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.756525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.791435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.826204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.866069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.914750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.023311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491224990095505:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.023433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.023813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491224990095510:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.023858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491224990095511:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.023965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.029075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:49.039911Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491224990095514:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:49.137934Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491224990095567:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:50.031552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491207810223783:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:50.031647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:51.669057Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491235911933634:2076];send_to=[0:7307199536658146131:7762515 ... ERROR: schemereq.cpp:627: Actor# [4:7608491298913818911:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:06.694705Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491298913818932:2339], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:62: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-02-19T08:32:06.696971Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=ZmJlZDdlZTktM2MzYTA0MzMtYzhiMDkzNTYtZDRhNzY3Mjc=, ActorId: [4:7608491298913818817:2325], ActorState: ExecuteState, LegacyTraceId: 01khtge4ewbr95xwcr4p6gr1xa, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 62 } message: "At function: KiCreateTable!" end_position { row: 6 column: 62 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:32:06.750521Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491298913818956:2348], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:45: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-02-19T08:32:06.751159Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=OWRmNjg0ZTEtZDdlYmNhYTMtNzY5YTQwYmQtNmQ0ZGFlMDM=, ActorId: [4:7608491298913818945:2342], ActorState: ExecuteState, LegacyTraceId: 01khtge7hp49sz2yh0g3zdvsz8, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 45 } message: "At function: KiCreateTable!" end_position { row: 6 column: 45 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:32:06.797984Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491298913818975:2354], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:43: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-02-19T08:32:06.799939Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=NWM1NTg5NDctNGM1MDljODMtOTI0YjIzMmItYjQ3ODU5OGY=, ActorId: [4:7608491298913818969:2351], ActorState: ExecuteState, LegacyTraceId: 01khtge7kd3t7b8bjbw6mm1x2b, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 43 } message: "At function: KiCreateTable!" end_position { row: 6 column: 43 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:32:07.689316Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608491303033687217:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:07.689362Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:07.708030Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:07.805103Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:07.840993Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:07.841096Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:07.851551Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:07.874444Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:07.900075Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:07.900101Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:07.900112Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:07.900198Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:08.495193Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:08.508091Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:08.528485Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:08.604953Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:08.713207Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:11.632968Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491320213557329:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.633042Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491320213557321:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.633122Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.633513Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491320213557336:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.633568Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.638303Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:11.653172Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491320213557335:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-02-19T08:32:11.744743Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491320213557390:2597] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:11.781128Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.083096Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.315509Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:12.324715Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:12.335062Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:12.340660Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715672, at schemeshard: 72057594046644480 >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> OperationMapping::IndexBuildSuccess [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] >> SplitPathTests::WithDatabaseShouldFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader 
for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:11.146819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:11.146902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:11.146988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:11.147028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:11.147063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:11.147102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:11.147131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:11.147162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:11.147257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:11.147330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:11.148193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:11.148512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:11.239218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:11.239288Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:11.252176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:11.252954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:11.253167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:11.265140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:11.265813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:11.266522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:11.266830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:11.270282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:11.270455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:11.271669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:11.271730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:11.271831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:11.271876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:11.271965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:11.272152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:11.422911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.423897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424243Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:11.424938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... er_secret.cpp:31: [72057594046678944] TAlterSecret TPropose operationId# 103:0 ProgressState 2026-02-19T08:32:14.375904Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-02-19T08:32:14.376030Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:14.376503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-02-19T08:32:14.376669Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter), operation: ALTER SECRET, path: /MyRoot/dir/test-secret 2026-02-19T08:32:14.378368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-02-19T08:32:14.378514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-02-19T08:32:14.378842Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 
2026-02-19T08:32:14.378959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904048 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:14.379022Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_secret.cpp:44: [72057594046678944] TAlterSecret TPropose operationId# 103:0HandleReply TEvOperationPlan: step# 5000040 2026-02-19T08:32:14.379168Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 Propose -> Done 2026-02-19T08:32:14.379383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 FAKE_COORDINATOR: Erasing txId 103 2026-02-19T08:32:14.381343Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:14.381398Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:32:14.381558Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:14.381601Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:211:2210], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:32:14.382042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:14.382107Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:32:14.382224Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:14.382264Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:14.382307Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:14.382367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:14.382421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-02-19T08:32:14.382466Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:14.382509Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:32:14.382546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:32:14.382616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:32:14.382666Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-02-19T08:32:14.382704Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-02-19T08:32:14.383179Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:14.383288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:14.383359Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:14.383393Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:32:14.383426Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:32:14.383499Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-02-19T08:32:14.385638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2026-02-19T08:32:14.385914Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-02-19T08:32:14.385960Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2026-02-19T08:32:14.386053Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:32:14.386077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:32:14.386531Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-02-19T08:32:14.386645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:32:14.386685Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:758:2746] 2026-02-19T08:32:14.386843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:32:14.386908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 
2026-02-19T08:32:14.386932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:758:2746] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2026-02-19T08:32:14.387371Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:14.387580Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 232us result status StatusSuccess 2026-02-19T08:32:14.387915Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-new" Version: 1 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckCacheByAst >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink+UseStreamIndex [GOOD] >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> TSchemeShardTest::CopyIndexedTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> KqpQuery::SelectCountAsteriskFromVar [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:12.594688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:12.594770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:12.594855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:12.594899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:12.594935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:12.594982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:12.595023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:12.595053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:12.595123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:12.595188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:12.596083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:12.596409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:12.681150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:12.681233Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:12.694234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:12.694959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:12.695152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:12.707243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:12.707989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-02-19T08:32:12.708759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:12.709064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:12.712270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:12.712451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:12.713641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:12.713704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:12.713828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:12.713875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:12.713993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:12.714202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:12.855277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.856266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.856386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.856482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.856607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.856680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.856763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.856857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.856956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.857024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.857089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.857158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.857228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:12.857312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... eration IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:14.826878Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-02-19T08:32:14.826899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:14.826923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2026-02-19T08:32:14.826950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:14.826993Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:14.827029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:14.827093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:14.827126Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-02-19T08:32:14.827147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:1 2026-02-19T08:32:14.827181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:32:14.827201Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-02-19T08:32:14.827219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:2 2026-02-19T08:32:14.827246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:32:14.827277Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, 
subscribers: 0 2026-02-19T08:32:14.827313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-02-19T08:32:14.827361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2026-02-19T08:32:14.827381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-02-19T08:32:14.827402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-02-19T08:32:14.828431Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:14.828507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:14.828540Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:14.828590Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-02-19T08:32:14.828632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:14.829796Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:14.829870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:14.829898Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:14.829926Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-02-19T08:32:14.829955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:32:14.830772Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:14.830837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:14.830863Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:14.830891Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:32:14.830917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:32:14.833575Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:14.833644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:14.833670Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:14.833696Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-02-19T08:32:14.833739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-02-19T08:32:14.833805Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-02-19T08:32:14.838801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:14.838942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:14.842304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:14.842454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-02-19T08:32:14.842684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-02-19T08:32:14.842729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-02-19T08:32:14.843093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-02-19T08:32:14.843200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- 
TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:14.843240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:723:2712] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:14.843689Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir1/dir2/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:14.843881Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir1/dir2/test-secret" took 236us result status StatusSuccess 2026-02-19T08:32:14.844206Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir1/dir2/test-secret" PathDescription { Self { Name: "test-secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpQuery::MixedCreateAsSelect >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpStats::DeferredEffects-UseSink |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> KqpQuery::QueryFromSqs [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> KqpExplain::UpdateOn+UseSink [GOOD] >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> KqpExplain::MultiJoinCteLinks [GOOD] >> 
TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels |95.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink+UseStreamIndex [GOOD] Test command err: 2026-02-19T08:31:40.219620Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491188125999967:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.221873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:40.419440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:40.463062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:40.463151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:40.500927Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491188125999936:2081] 1771489900214190 != 1771489900214193 2026-02-19T08:31:40.513025Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:40.523611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:40.578531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:40.605175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:40.605197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:40.605211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:40.605321Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:40.994647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:41.048793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.219635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.228694Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:41.418121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.502291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.291781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491201010903704:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.291933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.292271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491201010903714:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.292330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.680208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.708751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.735580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.765469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.800653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.830639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.897415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.940527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.030252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205305871877:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.030325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.030386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205305871882:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.030438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205305871884:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.030486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.033474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:44.043834Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491205305871886:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:44.136191Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491205305871937:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:45.219073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491188125999967:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.219165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:45.793765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.840896Z no ... n, path: Root/.metadata/script_executions 2026-02-19T08:32:08.197740Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:08.203132Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:08.220423Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.331637Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.542856Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:08.607287Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.696642Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:32:11.391480Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491322486225215:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.391596Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.391926Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491322486225225:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.391976Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.486946Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.519148Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.554378Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.594006Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.628258Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.664597Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.707790Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.765793Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.859156Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491322486226096:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.859324Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.859903Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491322486226102:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.859903Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491322486226101:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.859996Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:11.863900Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:11.875481Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491322486226105:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:11.940305Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491322486226156:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:12.452543Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491305306354213:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:12.452619Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:13.794887Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.841142Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.882809Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":5,"Operators":[{"Inputs":[],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","SinkType":"KqpTableSink","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryKeys","ReadRangesPointPrefixLen":"1","E-Rows":"0","ReadRangesKeys":["Key"],"Table":"SecondaryKeys","ReadColumns":["Key [1, 4)","Fk"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","reads":[{"columns":["Fk","Key"],"scan_by":["Key [1, 4)"],"type":"Scan"}],"writes":[{"columns":["Fk","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"Upsert","SinkType":"KqpTableSink","Table":"SecondaryKeys"}],"Node Type":"Upsert"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: 2026-02-19T08:31:40.986324Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491188387385247:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.986432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:41.199071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:41.243685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:41.243808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:41.304471Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:41.305886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:41.309899Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491188387385110:2081] 1771489900962162 != 1771489900962165 2026-02-19T08:31:41.403573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:41.410959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:41.410985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:41.411002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:41.411082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.832928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:41.839567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:41.903245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.988542Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:42.036582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.170193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.225956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.119849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205567256173:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.119984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.120562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205567256183:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.120612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.431258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.468726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.504317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.537578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.570014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.604697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.637362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.699509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.776692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205567257052:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.776768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.776856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205567257057:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.777132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205567257059:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.777174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.780676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:44.795479Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491205567257060:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:44.849112Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491205567257112:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:45.985190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491188387385247:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.985250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:47.623083Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491216907765625:2073];send_to=[0:7307199536658146131:7762515 ... 2515]; 2026-02-19T08:32:05.256482Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:08.019549Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608491309911632581:2164];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:08.019636Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:08.066301Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:08.147583Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:08.149388Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491309911632446:2081] 1771489928014934 != 1771489928014937 2026-02-19T08:32:08.157208Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:08.157310Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:08.185333Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:08.265813Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:08.265837Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:08.265844Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:08.265950Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:08.323769Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:32:08.797863Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:08.805291Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:08.822222Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:08.900253Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.073556Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:09.086550Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.173084Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.044138Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491327091503502:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.044245Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.044707Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491327091503511:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.044772Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.114361Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.148933Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.188323Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.224779Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.260847Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.299038Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.341685Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.394766Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.501971Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491327091504384:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.502050Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.502716Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491327091504389:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.502766Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491327091504390:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.502886Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.506449Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:12.518008Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491327091504393:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:32:12.579679Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491327091504444:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:13.019309Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491309911632581:2164];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:13.020100Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpQuery::QueryExplain |95.2%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: 2026-02-19T08:31:47.837557Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491219264597114:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:47.838917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:48.054470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:48.085218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:48.085356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:48.154601Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491219264597071:2081] 1771489907833193 != 1771489907833196 2026-02-19T08:31:48.164749Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:48.168232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:48.229539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:48.283002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-02-19T08:31:48.283027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:48.283040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:48.283110Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:48.694208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:48.700573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:48.763572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.843911Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:48.924048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.079505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.143678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:50.931023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491232149500834:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.931127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.931444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491232149500844:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.931518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.238390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.264956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.296553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.326118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.355418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.386320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.422217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.464500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.535484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491236444469010:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.535559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.535832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491236444469015:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.535841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491236444469016:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.535921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.539879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:51.553883Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491236444469019:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:51.612928Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491236444469070:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:52.835992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491219264597114:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:52.836120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:53.091706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... t detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:08.309966Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.006152s 2026-02-19T08:32:08.316524Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:08.445066Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:08.445174Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:08.448850Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:08.481671Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:08.488276Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:08.586350Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:08.586381Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:08.586389Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:08.586475Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:08.785514Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:09.175234Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:09.181874Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:09.204691Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.296438Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.306484Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:09.523557Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.605184Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.225262Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491323770805159:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.225393Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.225927Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491323770805169:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.226031Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.295672Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.335902Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.371144Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.405777Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.439917Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.477169Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.514868Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.564197Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.649615Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491323770806048:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.649735Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.650242Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491323770806053:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.650328Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491323770806054:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.650420Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.654947Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:12.664773Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491323770806057:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:12.739571Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491323770806108:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:13.275840Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491306590934143:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:13.275904Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:14.325186Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOn+UseSink [GOOD] Test command err: 2026-02-19T08:31:41.017554Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491193900735755:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:41.020700Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:41.341887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:41.341976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:41.377788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:41.404342Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491189605768340:2081] 1771489901008734 != 1771489901008737 2026-02-19T08:31:41.407607Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:41.416540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:41.494852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:41.494881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2026-02-19T08:31:41.495264Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:41.495354Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.549210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:41.913426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:41.919521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:31:41.991750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.017233Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:42.136998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.289995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.373859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.347681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491206785639401:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.347809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.349351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491206785639411:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.349406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.724801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.758604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.787897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.861205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.891518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.935173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.984109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.056382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.162174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491211080607592:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.162280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.163062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491211080607597:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.163104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491211080607598:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.163230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.167569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:45.182721Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491211080607601:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:45.283591Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491211080607652:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:46.012334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491193900735755:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:46.012444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":4,"Plans":[{"Pla ... 46131:7762515]; 2026-02-19T08:32:09.027733Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:09.058666Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:09.180175Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:09.180265Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:09.215310Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:09.227227Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:09.286357Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:09.286383Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:09.286391Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:09.286470Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:09.300262Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:09.848773Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:09.864638Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.935438Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:10.106252Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:10.174977Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:10.251869Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.152141Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491331445978441:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.152221Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.152635Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491331445978450:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.152682Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.236740Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.273867Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.314110Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.350629Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.388865Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.424568Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.471640Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.521526Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.624568Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491331445979321:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.624720Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.624942Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491331445979326:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.625066Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491331445979327:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.625167Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.629226Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:13.644107Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491331445979330:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:13.738387Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491331445979381:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:14.027712Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491314266107453:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:14.027776Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"Tables":["EightShard"],"PlanNodeId":2,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Update","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"[{Data: 0,Key: 100}]","Name":"Iterator"}],"Node Type":"ConstantExpr"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","writes":[{"columns":["Data","Key"],"type":"MultiUpdate"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"Update","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Update"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: 2026-02-19T08:31:40.583925Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491188441158671:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.584604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:40.794182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:40.800174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:40.800259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:40.856946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:40.864045Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:40.865138Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491188441158615:2081] 1771489900576941 != 1771489900576944 2026-02-19T08:31:40.997782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:31:41.004205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:41.004227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:41.004236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:41.004332Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.455665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:41.527715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.593186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:41.672549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.845952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.909502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:43.879463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491201326062376:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.879573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.882975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491201326062386:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.883042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.185112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.223459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.252652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.283721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.318670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.372035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.410112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.451632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.532555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205621030551:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.532630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.532732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205621030556:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.532762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205621030558:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.532786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.536430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:44.547372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491205621030560:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:44.651322Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491205621030613:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:45.581864Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491188441158671:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.581995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"0","T ... 08:32:09.829953Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.958541Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:10.008676Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:10.089968Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.821952Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491323500267706:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.822058Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.822319Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491323500267715:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.822359Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.897541Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.937474Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.971691Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.004761Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.035676Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.072982Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.111280Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.159888Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.254197Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491327795235894:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.254302Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.254520Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491327795235899:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.254538Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491327795235900:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.254591Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.258443Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:13.268106Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491327795235903:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:13.367562Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491327795235954:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:13.964573Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491306320396912:2276];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:13.964629Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"0","PlanNodeId":8,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"0","Table":"EightShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (Map)","E-Size":"0","E-Cost":"0"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-InnerJoin (Map)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"0","Columns":["Data","Key","Text"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node 
Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (Map)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin (Map)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop [GOOD] >> TBackupCollectionTests::DropErrorRecoveryTest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> KqpQuery::UpdateThenDelete-UseSink [GOOD] >> KqpQuery::CreateAsSelectView [GOOD] >> KqpQuery::CreateTableAs_MkDir >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::PathErrors |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: 2026-02-19T08:31:40.929289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491187731547659:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.929355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:41.203913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:41.239128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:41.239254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:41.294479Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491187731547629:2081] 1771489900926923 != 1771489900926926 2026-02-19T08:31:41.307316Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:41.321299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:41.402074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-02-19T08:31:41.402096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:41.402108Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:41.402198Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.477816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:41.851825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:41.865046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:41.933037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:41.935845Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:42.076453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.203903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.270101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.196201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491204911418688:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.196318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.202614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491204911418698:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.202725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.499853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.537430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.567194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.598467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.626389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.667450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.702608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.747653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.844400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491204911419565:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.844485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.844695Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491204911419571:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.844730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491204911419572:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.844877Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.848204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:44.858278Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491204911419575:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:44.913522Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491204911419626:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:45.929360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491187731547659:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.929461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1", ... Scan","E-Size":"0","Table":"EightShard"}],"Node Type":"TableFullScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} 2026-02-19T08:32:09.907138Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608491312672469669:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:09.907233Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:09.929781Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:10.035124Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:10.035223Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:10.037827Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491312672469634:2081] 1771489929905968 != 1771489929905971 2026-02-19T08:32:10.056917Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:10.097355Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:10.173311Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:10.173348Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:10.173358Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:10.173451Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:10.212372Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:32:10.747543Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:10.756538Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:10.768057Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:10.835217Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:10.958310Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:11.008144Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.088778Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.992996Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491329852340699:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.993119Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.993400Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491329852340709:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.993460Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.072758Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.109141Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.143184Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.180402Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.214858Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.255252Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.296536Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.354901Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.449143Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491334147308873:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.449238Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.449476Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491334147308878:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.449513Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491334147308879:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.449553Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.453386Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:14.465540Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491334147308882:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:14.543860Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491334147308933:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:14.907994Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491312672469669:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:14.908090Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> KqpLimits::TooBigColumn+useSink [GOOD] >> KqpLimits::ReadsetCountLimit |95.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete-UseSink [GOOD] Test command err: 2026-02-19T08:31:41.212393Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491191985361035:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:41.212445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:41.489925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:41.517889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:41.517979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:41.581938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:41.608368Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:41.612881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491191985360801:2081] 1771489901188526 != 1771489901188529 2026-02-19T08:31:41.725045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:41.725069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:41.725083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:41.725190Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.783811Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:42.138894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:42.207334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.212986Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:42.377301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.594011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.664866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.443743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491204870264574:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.443841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.444258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491204870264584:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.444294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.787906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.822127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.854115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.885259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.916159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.994306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.034759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.117989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.214360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491209165232757:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.214638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.215029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491209165232762:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.215080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491209165232763:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.215187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.219528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:45.238664Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491209165232766:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:45.304125Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491209165232819:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:46.213062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491191985361035:2262];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:46.213148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:47.299685Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491217755167733:2547], status: GENERIC_ERROR, issues:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATC ... ols/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:07.131912Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491281512218445:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:07.132920Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] 2026-02-19T08:32:09.921045Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608491311059888456:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:09.921368Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:09.947687Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:10.040960Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:10.041062Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:10.052551Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:10.071454Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:10.147954Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:10.170392Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:10.170415Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:10.170422Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:10.170509Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:10.669702Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:10.687244Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:10.753567Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:10.927377Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:10.933808Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.049816Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.118673Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491332534726747:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.118767Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.119048Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491332534726756:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.119098Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.197896Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.235755Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.272811Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.313159Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.363656Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.408476Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.446867Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.498036Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.590214Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491332534727627:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.590322Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.590454Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491332534727632:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.590519Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491332534727634:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.590569Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.594306Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:14.604604Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491332534727636:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:32:14.684950Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491332534727687:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:14.921152Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491311059888456:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:14.921234Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas |95.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] Test command err: 2026-02-19T08:30:31.601032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490890331814963:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:31.601106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:31.794051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:31.820765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:31.820891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:31.878372Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:31.882455Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490890331814930:2081] 1771489831599452 != 1771489831599455 2026-02-19T08:30:31.892055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:31.978698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:31.978721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:31.978732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:31.978822Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:30:32.056080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:32.372614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:32.608414Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:34.554447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490903216717708:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:34.554570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:34.554938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490903216717718:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:34.554991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:34.834540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.930880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:34.972225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:30:35.008120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.037701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:30:35.064835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.140165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715766:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.173323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715767:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:30:35.201936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715770:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.239339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715771:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:30:35.273050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715774:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.305572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:30:35.329675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.364909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715778:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.398567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715779:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:30:35.426049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715782:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:35.471758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715783:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-02-19T08:30:35.518023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490907511686336:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.518094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.518287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490907511686338:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.518364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:35.743173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_ac ... : \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:07.943084Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7608491303891935361:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:07.943486Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:07.975870Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:08.121279Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:08.122887Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7608491303891935309:2081] 1771489927932660 != 1771489927932663 2026-02-19T08:32:08.135672Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:08.135784Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:08.168708Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:08.252013Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:08.290309Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:08.290335Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:08.290343Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:08.290442Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:08.945977Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:09.099958Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:09.107114Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:09.113516Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.217040Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.451111Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:09.606339Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.651571Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491325366773673:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.651690Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.651963Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491325366773682:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.652017Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:12.739353Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.781767Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.827277Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.873027Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.917888Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.942140Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7608491303891935361:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:12.942265Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:12.971761Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.015567Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.081570Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:13.189831Z node 8 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491329661741848:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.189955Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.190099Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491329661741853:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.190300Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491329661741855:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.190376Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:13.195090Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:13.208719Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491329661741857:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:13.304544Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491329661741908:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TestProtocols::TestResolveProtocol >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> TInterconnectTest::TestManyEvents >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::ManyDirs >> TInterconnectTest::TestBlobEvent >> TestProtocols::TestConnectProtocol >> TInterconnectTest::TestSimplePingPong >> TInterconnectTest::TestNotifyUndelivered >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:57.752356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:57.752458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.752533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:57.752576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:57.752617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:57.752655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:57.752687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:57.752730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:57.752795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.752871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:57.753872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:57.754189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:57.847520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:57.847579Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:57.859924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:57.860533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:57.860723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:57.871752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:57.872449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:57.873191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:57.873481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:57.876406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:57.876617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:57.877837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:57.877906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:57.878010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:57.878052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:57.878113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:57.878298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:58.041248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042259Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.042975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.043053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.043149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.043240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.043312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
rtId: 0 2026-02-19T08:32:18.739541Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2026-02-19T08:32:18.739602Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:576: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2026-02-19T08:32:18.739647Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:228: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:18.739680Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:581: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-02-19T08:32:18.739890Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 Propose -> Done 2026-02-19T08:32:18.740167Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:32:18.744380Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:18.744546Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:18.744728Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:18.744884Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:18.745116Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:18.745293Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:18.745470Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:18.745508Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:32:18.745805Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:18.745853Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [15:212:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-02-19T08:32:18.746184Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 
104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:18.746229Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:32:18.746378Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:32:18.746421Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:32:18.746482Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:32:18.746541Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:32:18.746605Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-02-19T08:32:18.746670Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:32:18.746746Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:32:18.746796Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:32:18.747064Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 10 2026-02-19T08:32:18.747142Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2026-02-19T08:32:18.747201Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-02-19T08:32:18.747800Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:32:18.747896Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:32:18.747941Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:32:18.748003Z node 15 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:32:18.748058Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-02-19T08:32:18.748160Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-02-19T08:32:18.750653Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:32:18.750876Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:32:18.750920Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:32:18.751332Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:32:18.751417Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:32:18.751460Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [15:1407:3245] TestWaitNotification: OK eventTxId 104 2026-02-19T08:32:18.752021Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:18.752209Z node 15 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 227us result status StatusSuccess 2026-02-19T08:32:18.752745Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardUploadRows::RetryUploadRowsToShard >> TTxDataShardUploadRows::TestUploadRows >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail >> TBackupCollectionTests::DropErrorRecoveryTest [GOOD] >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart >> KqpAgg::AggWithSqlIn [GOOD] >> TActorTracker::Basic [GOOD] >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected >> TInterconnectTest::TestConnectAndDisconnect >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> KqpParams::InvalidJson [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> KqpExplain::Predicates [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::DefaultStorageConfig >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER >> TestSqsTopicHttpProxy::TestSendMessage >> TestSqsTopicHttpProxy::TestGetQueueAttributesStd >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateDropKesus >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityInvalid >> 
TInterconnectTest::TestTraceIdPassThrough [GOOD] >> KqpQuery::Pure [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] Test command err: 2026-02-19T08:31:46.259770Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491213820435195:2067];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:46.259829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:46.557822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:46.580307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:46.580449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:46.659657Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491213820435169:2081] 1771489906258339 != 1771489906258342 2026-02-19T08:31:46.671000Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:46.677816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:46.759075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:46.779649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-02-19T08:31:46.779673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:46.779691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:46.779801Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:47.195829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:47.202227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:47.255005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.282544Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:47.402224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.563682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.638830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.455031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226705338945:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.455201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.455684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226705338955:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.455754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.788581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.814521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.843378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.870773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.896044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.930547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:50.005679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:50.067424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:50.139352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491231000307120:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.139456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.139566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491231000307125:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.139597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491231000307127:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.139636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.143143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:50.158423Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491231000307129:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:50.214827Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491231000307180:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:51.260191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491213820435195:2067];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:51.260262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:51.652354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... cpp:689) 2026-02-19T08:32:11.827174Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:11.939993Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:11.996878Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:12.073581Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.659447Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491335208755424:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.659526Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.659933Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491335208755433:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.659979Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:14.734848Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.765522Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.800463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.838144Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.871753Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.909456Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.948063Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.999651Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.090208Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491339503723608:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:15.090287Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:15.090351Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491339503723613:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:15.090700Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491339503723616:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:15.090794Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:15.095463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:15.105537Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491339503723615:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:15.166876Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491339503723668:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:15.862323Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491318028884475:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:15.862401Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:17.196462Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.241404Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.313469Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_2_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_2_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"Filter","Name":"Iterator"},{"E-Rows":"2","Inputs":[],"Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"ConstantExpr-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_2_0","Node Type":"Precompute_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/SecondaryKeys","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"SecondaryKeys","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent 
Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Key","Value"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"Filter"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: 2026-02-19T08:31:46.188344Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491213871637968:2179];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:46.189384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:46.429867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:46.459990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:46.460126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:46.508324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:46.510671Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:46.515023Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491213871637809:2081] 1771489906162333 != 1771489906162336 2026-02-19T08:31:46.620484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:46.620514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:46.620523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:46.620630Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:46.625252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:47.029152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:31:47.078102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.189224Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:47.198094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.344100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.408777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.329625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226756541585:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.329784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.330378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226756541595:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.330467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.672441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.708700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.746485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.779457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.811060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.852679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.914785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.989971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:50.072986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491231051509770:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.073084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.073499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491231051509775:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.073558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491231051509776:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.073606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:50.078752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:50.090432Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491231051509779:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:50.192103Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491231051509830:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:51.185315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491213871637968:2179];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:51.185621Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:51.742341Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YjI0YjJmYTktMmMzNmY5NDgtMzg5OTVjZTktOThkMjg2MmI=, ActorId: [1:7608491235346477422:2536], ActorState: ExecuteState, LegacyTraceId: 01khtgdrspb398wbg05z1n15jd, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1842: ydb/core/kqp/qu ... _finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:13.400272Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:13.401271Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491331002760014:2081] 1771489933302431 != 1771489933302434 2026-02-19T08:32:13.434462Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:13.434544Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:13.444518Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:13.514165Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:13.529338Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:13.529367Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:13.529376Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:13.529485Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:14.065055Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:14.081318Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.150432Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.332815Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:14.376837Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.449917Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.180770Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348182631076:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.180887Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.181205Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348182631085:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.181283Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.260267Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.295934Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.330482Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.362317Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.403258Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.439554Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.473098Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.522353Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.590810Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348182631955:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.590901Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.591010Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348182631960:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.591067Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348182631961:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.591102Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.594050Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:17.603140Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491348182631964:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:17.702839Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491348182632015:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:18.303876Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491331002760049:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:18.303946Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:19.616855Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:19.737855Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=5&id=NTFiNjcwYWMtNzJkOWJmNDUtNjJiNGZhZDEtMWI3MTExYzA=, ActorId: [5:7608491356772566919:2538], ActorState: ExecuteState, LegacyTraceId: 01khtgem6cb42szsqzgmff1adh, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1388: Invalid Json value status# BAD_REQUEST issues# trace_id#
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1388: Invalid Json value >> TInterconnectTest::OldFormat >> KqpNamedExpressions::NamedExpressionRandomDataQuery-UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomInsertDataQuery+UseSink >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: 2026-02-19T08:31:44.999851Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491205219094711:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.000165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:45.253749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.295830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.295963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.345880Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491205219094660:2081] 1771489904976963 != 1771489904976966 2026-02-19T08:31:45.364055Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.369302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.481028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:45.573464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.573491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.573501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.573580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:46.006598Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:46.039673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:46.090031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.256399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.408204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.481962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.400777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222398965724:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.400914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.401345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222398965734:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.401388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.699658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.730992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.762690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.796438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.830075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.872169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.916659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.974601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.055891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226693933903:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.055994Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.056189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226693933908:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.056232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491226693933909:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.056346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.059961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:49.071092Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491226693933912:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:49.161395Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491226693933963:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:49.997921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491205219094711:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:49.998010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadCol ... ":0},"PlanNodeType":"Query"}} 2026-02-19T08:32:13.188450Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608491331042597526:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:13.188896Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:13.204645Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:13.304523Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491331042597475:2081] 1771489933186317 != 1771489933186320 2026-02-19T08:32:13.316163Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:13.317993Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:13.318084Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:13.322282Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:13.392361Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:13.407323Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:13.407361Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:13.407372Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:13.407459Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-02-19T08:32:13.946315Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:13.966953Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.029175Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.193560Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:14.230297Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:14.324731Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.043265Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348222468543:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.043365Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.043579Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348222468552:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.043623Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.129402Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.163074Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.196268Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.227498Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.260570Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.297346Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.330501Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.376908Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.455254Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348222469423:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.455361Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.455399Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348222469428:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.455531Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491348222469430:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.455578Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:17.459359Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:17.470831Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491348222469431:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:17.533698Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491348222469483:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:18.188363Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491331042597526:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:18.188446Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:19.122147Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TInterconnectTest::OldNbs [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TSchemeShardTest::DefaultStorageConfig [GOOD] >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpAgg::AggWithSqlIn [GOOD] Test command err: 2026-02-19T08:30:42.056219Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490939298220229:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:42.056425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:42.263603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:30:42.271922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:30:42.272063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:30:42.359639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:30:42.362086Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:30:42.500129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:30:42.500157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:30:42.500170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:30:42.500302Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration 2026-02-19T08:30:42.532615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:30:42.960368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:30:43.018707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.088524Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:30:43.172757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.329284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:43.400155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.395560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490952183123854:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.395648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.396014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490952183123864:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.396067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:45.743834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.777903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.808005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.837987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.865119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.897005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.928248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:45.973015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:30:46.040815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490956478092034:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.040883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.041050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490956478092039:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.041406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490956478092041:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.041449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:30:46.044541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:30:46.056018Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490956478092043:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:30:46.151896Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490956478092096:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:47.057876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490939298220229:2157];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:47.057944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:30:50.067429Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608490973780246158:2145];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:50.067482Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:30:50.085419Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check scr ... X_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.106538Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.106555Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.113914Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.114014Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.114047Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.116438Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.116501Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.116516Z node 8 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.122420Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.122483Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.122498Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.127437Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.127508Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.127523Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.132433Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.132518Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.132537Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.137544Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.137648Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.137666Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.142564Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.142639Z node 8 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.142655Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.147576Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.147648Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.147665Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.152586Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.152652Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.152687Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.157686Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.158099Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.158122Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.162611Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.162693Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:32:08.162711Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 
2026-02-19T08:32:08.208828Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491308413674784:3459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.209002Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.209468Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491308413674789:3462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.209531Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7608491308413674790:3463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.209734Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:08.215113Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:08.236620Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7608491308413674793:3464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-02-19T08:32:08.309971Z node 8 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [8:7608491308413674844:6590] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:09.145834Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:32:09.145880Z node 8 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateDropSolomon |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2026-02-19T08:32:21.047686Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [5:1:2048] [node 6] ICP77 @209 (null) -> PendingActivation 2026-02-19T08:32:21.047750Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [5:1:2048] [node 6] ICP01 ready to work 2026-02-19T08:32:21.049286Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [6:10:2048] [node 5] ICP77 @209 (null) -> PendingActivation 2026-02-19T08:32:21.049375Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [6:10:2048] [node 5] ICP01 ready to work 2026-02-19T08:32:21.049623Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2026-02-19T08:32:21.051872Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:13247 2026-02-19T08:32:21.052039Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [5:1:2048] [node 6] ICP77 @495 PendingNodeInfo -> PendingConnection 2026-02-19T08:32:21.053294Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:756: Handshake [5:21:2058] [node 6] ICH01 starting outgoing handshake 2026-02-19T08:32:21.053526Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-02-19T08:32:21.055521Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:760: Handshake [5:21:2058] [node 6] ICH05 connected to peer 2026-02-19T08:32:21.057731Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:34212 2026-02-19T08:32:21.058269Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1046: Handshake [6:23:2058] [node 0] ICH02 starting incoming handshake 2026-02-19T08:32:21.063792Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:661: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 590360 ProgramStartTime: 9276131706966 Serial: 3124256950 ReceiverNodeId: 6 SenderActorId: "[5:3124256950:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 590360" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 590360" AcceptUUID: "Cluster for process with id: 590360" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: 
"s\211\372\274\0304\264^\372{\256\241\362\344\034B\357\341\233\005\363\236h\031:^\026\216i\035\364\023" RequestXxhash: true RequestXdcShuffle: true 2026-02-19T08:32:21.064570Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:661: Handshake [6:23:2058] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 590360 ProgramStartTime: 9276131706966 Serial: 3124256950 ReceiverNodeId: 6 SenderActorId: "[5:3124256950:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 590360" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 590360" AcceptUUID: "Cluster for process with id: 590360" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "s\211\372\274\0304\264^\372{\256\241\362\344\034B\357\341\233\005\363\236h\031:^\026\216i\035\364\023" RequestXxhash: true RequestXdcShuffle: true 2026-02-19T08:32:21.064663Z node 6 :INTERCONNECT WARN: interconnect_handshake.cpp:550: Handshake [6:23:2058] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-02-19T08:32:21.065147Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [6:10:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2026-02-19T08:32:21.066556Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [6:10:2048] [node 5] ICP02 configured for host ::1:8657 2026-02-19T08:32:21.066614Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:263: Proxy [6:10:2048] [node 5] ICP17 incoming handshake (actor [6:23:2058]) 2026-02-19T08:32:21.066671Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [6:10:2048] [node 5] ICP77 @495 PendingNodeInfo -> PendingConnection 2026-02-19T08:32:21.066727Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:211: Proxy [6:10:2048] [node 5] ICP07 issued incoming handshake reply 2026-02-19T08:32:21.066785Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:220: Proxy [6:10:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2026-02-19T08:32:21.066848Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [6:10:2048] [node 5] ICP77 @221 PendingConnection -> PendingConnection 2026-02-19T08:32:21.067383Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:661: Handshake [6:23:2058] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 590360 ProgramStartTime: 9276148964690 Serial: 592902139 SenderActorId: "[6:592902139:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 590360" AcceptUUID: "Cluster for process with id: 590360" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2026-02-19T08:32:21.067946Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:661: Handshake [5:21:2058] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 590360 ProgramStartTime: 9276148964690 Serial: 592902139 SenderActorId: "[6:592902139:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 590360" AcceptUUID: "Cluster for process with id: 590360" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2026-02-19T08:32:21.068027Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:550: Handshake [5:21:2058] [node 6] ICH09 
Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-02-19T08:32:21.068178Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-02-19T08:32:21.069092Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:661: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "s\211\372\274\0304\264^\372{\256\241\362\344\034B\357\341\233\005\363\236h\031:^\026\216i\035\364\023" 2026-02-19T08:32:21.069218Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:419: Handshake [5:21:2058] [node 6] ICH04 handshake succeeded 2026-02-19T08:32:21.069574Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:341: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2026-02-19T08:32:21.069642Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:467: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:21:2058] poison: false 2026-02-19T08:32:21.069696Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [5:1:2048] [node 6] ICP77 @356 PendingConnection -> StateWork 2026-02-19T08:32:21.069911Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:383: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:25:2048] 2026-02-19T08:32:21.070707Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:291: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:21:2058] self: [5:3124256950:0] peer: [6:592902139:0] socket: 24 qp: -1 2026-02-19T08:32:21.070765Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:320: Session [5:25:2048] [node 6] ICS10 traffic start 2026-02-19T08:32:21.071676Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:336: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2026-02-19T08:32:21.071764Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-02-19T08:32:21.072537Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:375: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2026-02-19T08:32:21.073419Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-02-19T08:32:21.073489Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:253: Session [5:25:2048] [node 6] ICS04 subscribe for session state for [5:19:2057] 2026-02-19T08:32:21.073619Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:34222 2026-02-19T08:32:21.073811Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:256: InputSession [5:26:2048] [node 6] ICIS01 InputSession created 2026-02-19T08:32:21.074292Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1046: Handshake [6:27:2059] [node 0] ICH02 starting incoming handshake 2026-02-19T08:32:21.074515Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-02-19T08:32:21.075225Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.081496Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-02-19T08:32:21.081594Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession 
[5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.082026Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:419: Handshake [6:23:2058] [node 5] ICH04 handshake succeeded 2026-02-19T08:32:21.082315Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:337: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2026-02-19T08:32:21.082372Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:452: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:23:2058] poison: false 2026-02-19T08:32:21.082437Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [6:10:2048] [node 5] ICP77 @356 PendingConnection -> StateWork 2026-02-19T08:32:21.082552Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:383: Proxy [6:10:2048] [node 5] ICP22 created new session: [6:28:2048] 2026-02-19T08:32:21.082610Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:291: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:23:2058] self: [6:592902139:0] peer: [5:3124256950:0] socket: 25 qp: -1 2026-02-19T08:32:21.082659Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:320: Session [6:28:2048] [node 5] ICS10 traffic start 2026-02-19T08:32:21.082730Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:336: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2026-02-19T08:32:21.082779Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2026-02-19T08:32:21.082821Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:375: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2026-02-19T08:32:21.082867Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2026-02-19T08:32:21.082979Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:256: InputSession [6:29:2048] [node 5] ICIS01 InputSession created 2026-02-19T08:32:21.083031Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-02-19T08:32:21.083074Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-02-19T08:32:21.083181Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [6:29:2048] [node 5] ICIS02 ReceiveData called 2026-02-19T08:32:21.083253Z node ... 
2026-02-19T08:32:21.093333Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:204: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:34:2060]) is held 2026-02-19T08:32:21.094080Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-02-19T08:32:21.094791Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:661: Handshake [5:32:2059] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\257\300~\230PK+!\230\340\371n\251\017\016\232\300\rR\315+8:\370\036\177a\3110\272\036\220" 2026-02-19T08:32:21.094909Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:419: Handshake [5:32:2059] [node 6] ICH04 handshake succeeded 2026-02-19T08:32:21.095159Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:341: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2026-02-19T08:32:21.095199Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:452: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:34:2060] poison: true 2026-02-19T08:32:21.095243Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:467: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:32:2059] poison: false 2026-02-19T08:32:21.095275Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [5:1:2048] [node 6] ICP77 @356 StateWork -> StateWork 2026-02-19T08:32:21.095330Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:291: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:32:2059] self: [5:3124256950:0] peer: [6:592902139:0] socket: 30 qp: -1 2026-02-19T08:32:21.095367Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:320: Session [5:25:2048] [node 6] ICS10 traffic start 2026-02-19T08:32:21.095433Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:336: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2026-02-19T08:32:21.095475Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-02-19T08:32:21.095513Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:375: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2026-02-19T08:32:21.095597Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-02-19T08:32:21.095674Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:34240 2026-02-19T08:32:21.096132Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1046: Handshake [6:38:2062] [node 0] ICH02 starting incoming handshake 2026-02-19T08:32:21.096870Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:256: InputSession [5:37:2048] [node 6] ICIS01 InputSession created 2026-02-19T08:32:21.097695Z node 6 :INTERCONNECT NOTICE: interconnect_tcp_proxy.cpp:421: Proxy [6:10:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:8657) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:35:2061] held: no 2026-02-19T08:32:21.097766Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:467: Proxy [6:10:2048] [node 5] ICP052 dropped outgoing handshake: [6:30:2060] poison: false 2026-02-19T08:32:21.097811Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:444: Proxy [6:10:2048] [node 5] ICP28 other handshake is still going on 2026-02-19T08:32:21.098366Z node 5 
:INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-02-19T08:32:21.098449Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.098666Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:419: Handshake [6:35:2061] [node 5] ICH04 handshake succeeded 2026-02-19T08:32:21.098846Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:337: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2026-02-19T08:32:21.098887Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:452: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:35:2061] poison: false 2026-02-19T08:32:21.098915Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [6:10:2048] [node 5] ICP77 @356 StateWork -> StateWork 2026-02-19T08:32:21.098958Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:291: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:35:2061] self: [6:592902139:0] peer: [5:3124256950:0] socket: 31 qp: -1 2026-02-19T08:32:21.098993Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:320: Session [6:28:2048] [node 5] ICS10 traffic start 2026-02-19T08:32:21.099052Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:336: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2026-02-19T08:32:21.099096Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-02-19T08:32:21.099133Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:80: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2026-02-19T08:32:21.099200Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1044: Session [6:28:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2026-02-19T08:32:21.099245Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:375: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2026-02-19T08:32:21.099269Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-02-19T08:32:21.099321Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-02-19T08:32:21.099379Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.099458Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:256: InputSession [6:39:2048] [node 5] ICIS01 InputSession created 2026-02-19T08:32:21.099560Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-02-19T08:32:21.099582Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.099602Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-02-19T08:32:21.099648Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 
2026-02-19T08:32:21.099709Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.099725Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.099910Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-02-19T08:32:21.099933Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.099965Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-02-19T08:32:21.099994Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-02-19T08:32:21.100118Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-02-19T08:32:21.100142Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.100161Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-02-19T08:32:21.100176Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-02-19T08:32:21.100198Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-02-19T08:32:21.100213Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-02-19T08:32:21.100247Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:176: Session [6:28:2048] [node 5] ICS02 send event from: [6:20:2057] to: [5:19:2057] 2026-02-19T08:32:21.100302Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:978: Session [6:28:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 RdmaPayload# 0 InflightDataAmount# 84 RdmaInflightDataAmount# 0 2026-02-19T08:32:21.100388Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-02-19T08:32:21.100426Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:345: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2026-02-19T08:32:21.100452Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2026-02-19T08:32:21.100498Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:971: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-02-19T08:32:21.100525Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-02-19T08:32:21.100540Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-02-19T08:32:21.100565Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2026-02-19T08:32:21.100599Z node 5 :INTERCONNECT_SESSION DEBUG: 
interconnect_channel.cpp:80: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2026-02-19T08:32:21.100661Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1044: Session [5:25:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2026-02-19T08:32:21.100699Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:989: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2026-02-19T08:32:21.100753Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:117: Session [5:25:2048] [node 6] ICS01 socket: 30 reason# 2026-02-19T08:32:21.100800Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:555: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:25:2048] VirtualId# [5:3124256950:0] 2026-02-19T08:32:21.100834Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:183: Proxy [5:1:2048] [node 6] ICP77 @209 StateWork -> PendingActivation 2026-02-19T08:32:21.100874Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:588: Session [5:25:2048] [node 6] ICS25 shutdown socket, reason# 2026-02-19T08:32:21.100960Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:555: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: 2026-02-19T08:31:40.983330Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491188196092479:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.983529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:41.200845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:41.238353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:41.238505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:41.296762Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:41.298065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:41.298586Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491188196092284:2081] 1771489900970210 != 1771489900970213 2026-02-19T08:31:41.433657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:41.474845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:41.474864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2026-02-19T08:31:41.474869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:41.474930Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.953363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:41.962777Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:42.016058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.162456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.353211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.428300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.379770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205375963347:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.379896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.380236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491205375963357:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.380298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.714948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.751173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.787755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.824782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.859655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.908820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.954048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.016307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.125265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491209670931535:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.125353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.125617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491209670931540:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.125684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491209670931541:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.125820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.129147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:45.144696Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491209670931544:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:45.239785Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491209670931595:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:45.982992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491188196092479:2203];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.983053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:47.926973Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491219565812082:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:47.927048Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... column: 30 } message: "At function: KiWriteTable!" end_position { row: 12 column: 30 } severity: 1 issues { position { row: 12 column: 30 } message: "Cannot find table \'db.[/Root/Test]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 12 column: 30 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:32:14.443413Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608491333325226028:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:14.444402Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:14.465792Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:14.572651Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:14.572747Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:14.573235Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491333325225989:2081] 1771489934440916 != 1771489934440919 2026-02-19T08:32:14.586321Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:14.589093Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:14.670077Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:14.670103Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:14.670113Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-02-19T08:32:14.670199Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:14.671294Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:15.187277Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:15.204386Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.306125Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.463789Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:15.475758Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.609579Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.194180Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491350505097049:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.194265Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.194706Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491350505097058:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.194790Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.286972Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.326916Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.359801Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.392217Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.423967Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.456827Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.492304Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.546253Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.627457Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491350505097928:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.627560Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.627611Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491350505097933:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.627851Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491350505097935:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.627903Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.631751Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:18.643972Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491350505097936:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:18.725835Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491350505097988:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:19.443263Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491333325226028:2078];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:19.443340Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TestProtocols::TestHTTPRequest [GOOD] >> KqpQuery::CreateTableAs_MkDir [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] Test command err: 2026-02-19T08:31:45.075892Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491208084337781:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.076575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:45.428106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.428231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.454150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.480261Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491208084337725:2081] 1771489905039762 != 1771489905039765 2026-02-19T08:31:45.487731Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.493480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.616481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.616501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.616508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.616585Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:45.668124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:46.075800Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:46.138309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:46.148434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:46.208561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.396277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.566598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.650844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.443789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491220969241492:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.443929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.444281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491220969241502:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.444350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.745606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.780234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.811048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.841459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.879139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.924465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.960891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.022330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.105684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491225264209671:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.105766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.105822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491225264209676:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.105997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491225264209678:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.106033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.109559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:49.120688Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491225264209679:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:49.178249Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491225264209731:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:50.058593Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491208084337781:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:50.058665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:51.676075Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491233940988195:2070];send_to=[0:7307199536658146131:7762515 ... 2-19T08:32:14.290280Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:14.309068Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:14.431241Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:14.433111Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:14.433185Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:14.437283Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491335515841015:2081] 1771489934284623 != 1771489934284626 2026-02-19T08:32:14.464355Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:14.523520Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:14.523546Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:14.523557Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:14.523647Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:14.574061Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:15.046777Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:15.054786Z node 5 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.128968Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.287871Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.289992Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:15.362992Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.321031Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491352695712080:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.321146Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.321438Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491352695712089:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.321501Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.400960Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.435070Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.469689Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.502452Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.534766Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.570615Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.604505Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.653546Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.726188Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491352695712957:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.726275Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.726307Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491352695712962:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.726447Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491352695712964:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.726495Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:18.730183Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:18.741053Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491352695712966:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:18.794809Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491352695713017:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:19.286676Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491335515841070:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:19.286765Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:20.998153Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=5&id=N2RhOTQwNGItODgwMzc0NDAtMmJiNTdiYjgtNDQ2NDk5ODk=, ActorId: [5:7608491361285647922:2538], ActorState: ExecuteState, LegacyTraceId: 01khtgemvy46vtctd26ddx6f3v, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1842: ydb/core/kqp/query_data/kqp_query_data.cpp:263: Parameter $x type mismatch, expected: Type (List) { List item type: { Type (Data), schemeType: Uint32, schemeTypeId: 2 } } , actual: Type (EmptyList) status# BAD_REQUEST issues# trace_id# >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Void >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> 
ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart [GOOD] >> TBackupCollectionTests::IncrementalBackupOperation |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> KqpStats::DeferredEffects-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2026-02-19T08:32:22.606008Z node 4 :INTERCONNECT WARN: interconnect_handshake.cpp:550: Handshake [4:22:2057] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-02-19T08:32:23.084997Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:550: Handshake [5:20:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-02-19T08:32:23.560592Z node 8 :INTERCONNECT WARN: interconnect_handshake.cpp:550: Handshake [8:22:2057] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-02-19T08:32:23.563165Z node 7 :INTERCONNECT WARN: interconnect_handshake.cpp:550: Handshake [7:20:2058] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> KqpQuery::QueryExplain [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateTableAs_MkDir [GOOD] Test command err: 2026-02-19T08:31:40.282685Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491187594710451:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:40.282782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:40.540030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:40.565467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:40.565561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-02-19T08:31:40.632747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:40.664924Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:40.665823Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491187594710414:2081] 1771489900280710 != 1771489900280713 2026-02-19T08:31:40.750516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:40.762434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:40.762457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:40.762474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:40.762592Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.160336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:41.297957Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:43.248366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491200479613195:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.248409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491200479613206:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.248461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.248763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491200479613210:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.248823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:43.252724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:43.264342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491200479613209:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:31:43.395771Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491200479613264:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:43.645442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.086094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.306229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:31:44.314661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:31:44.321569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:31:45.374963Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491211427093378:2256];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.375036Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:45.396587Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:45.537791Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.545883Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.545972Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.547936Z node 2 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491211427093151:2081] 1771489905341709 != 1771489905341712 2026-02-19T08:31:45.573423Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.583005Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.719611Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.719629Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.719635Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.719696Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:45.779714Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:46.141932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:46.148747Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:46.377746Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:48.563793Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491224311995926:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.563860Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491224311995939:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.563919Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.564229Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491224311995943:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.564309Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource poo ... nd or you don't have access permissions } 2026-02-19T08:32:15.250149Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:15.252886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:15.260673Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491338366780876:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:32:15.346432Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491338366780929:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:15.383724Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.425904Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:15.794442Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:16.028953Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:16.035266Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:16.079695Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491321186910810:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:16.079767Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:16.316716Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491342661748898:2944] txid# 281474976710673, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:16.329517Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.056006Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:17.063524Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:18.369082Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7608491350327713929:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:18.369173Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:18.382865Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:18.474970Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:18.477000Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491350327713898:2081] 1771489938367970 != 1771489938367973 2026-02-19T08:32:18.508048Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:18.508137Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:18.520009Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:18.570346Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:18.570385Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:18.570395Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:18.570489Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:18.599237Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:19.022970Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:19.374823Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:21.936020Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7608491363212616670:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.936096Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491363212616679:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.936107Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.936585Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491363212616684:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.936671Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.940049Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:21.949377Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491363212616685:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:32:22.046202Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491367507584033:2531] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:22.125383Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.281315Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:22.291565Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:22.302813Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:56.615555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:56.615640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:56.615706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:56.615744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:56.615778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 
2026-02-19T08:31:56.615811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:56.615839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:56.615882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:56.615938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:56.616661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:56.617499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:56.620070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:56.710909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:56.710967Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:56.727955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:56.728557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:56.728752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:56.739160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:56.739887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:56.740519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:56.740788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:56.744790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:56.744972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:56.746078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:56.746136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:56.746243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:56.746282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:56.746341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:56.746499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:56.900459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.902395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.902542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.902610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.902735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.902805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.902867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.902930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.902992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.903053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.903136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.903220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.903298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.903354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
s: StatusAccepted 2026-02-19T08:32:24.111949Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-02-19T08:32:24.112014Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:30: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2026-02-19T08:32:24.112090Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:24.112241Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:24.114782Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-02-19T08:32:24.114916Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:808:2776], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:32:24.115234Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2026-02-19T08:32:24.115341Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:32:24.115498Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2026-02-19T08:32:24.115541Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2026-02-19T08:32:24.115576Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000043 2026-02-19T08:32:24.115745Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:24.115817Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 73014446191 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:24.115865Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000043 2026-02-19T08:32:24.115947Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710760:0 Propose -> Done 2026-02-19T08:32:24.117823Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-02-19T08:32:24.117875Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-02-19T08:32:24.117961Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-02-19T08:32:24.117993Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:24.118037Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-02-19T08:32:24.118067Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:24.118129Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-02-19T08:32:24.118199Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:129:2153] message: TxId: 281474976710760 2026-02-19T08:32:24.118249Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:24.118297Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-02-19T08:32:24.118329Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710760:0 2026-02-19T08:32:24.118415Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-02-19T08:32:24.120248Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-02-19T08:32:24.120311Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710760 2026-02-19T08:32:24.120374Z node 17 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxReply : TEvNotifyTxCompletionResult, id# 103, txId# 281474976710760 2026-02-19T08:32:24.120508Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:808:2776], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-02-19T08:32:24.122058Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-02-19T08:32:24.122185Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:808:2776], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:32:24.122236Z node 17 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:32:24.123750Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2026-02-19T08:32:24.123872Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Done, SubState: None, 
IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:808:2776], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:32:24.123925Z node 17 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2026-02-19T08:32:24.124089Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:32:24.124174Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:897:2854] TestWaitNotification: OK eventTxId 103 >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DeferredEffects-UseSink [GOOD] Test command err: 2026-02-19T08:31:41.232670Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491190281832393:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:41.233831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:41.559794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:41.559900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:41.625913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:41.628890Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:41.667285Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:41.752725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:41.752750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:41.752758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:41.752890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.905059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:42.163082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:42.215391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.230664Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:42.379353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.584671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.661355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.474111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491203166736091:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.474230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.474570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491203166736101:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.474626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.806987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.837647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.868141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.901293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.930141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:44.967779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.013571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.082721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:45.174202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491207461704265:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.174329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.178042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491207461704270:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.178142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491207461704271:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.178306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:45.185879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:45.200408Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491207461704274:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:45.291434Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491207461704327:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:46.225689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491190281832393:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:46.225789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:48.486524Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491224008428245:2191];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:48.486664Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:48.510307Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check scr ... 692724992:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:16.295125Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:16.315423Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:16.453214Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:16.455241Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491341692724962:2081] 1771489936290819 != 1771489936290822 2026-02-19T08:32:16.469217Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:16.471169Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:16.471272Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:16.485829Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:16.558841Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:16.558869Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:16.558882Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:16.559055Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-02-19T08:32:16.714188Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:17.080249Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:17.090463Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.145525Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.298593Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:17.304728Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:17.368412Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.067038Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491358872596032:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.067136Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.067435Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491358872596042:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.067486Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.139003Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.180204Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.214794Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.246254Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.282140Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.319191Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.358508Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.405266Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:20.479854Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491358872596913:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.479943Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.479945Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491358872596918:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.480131Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491358872596920:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.480190Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:20.482995Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:20.493502Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491358872596921:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:20.569926Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491358872596973:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:21.292642Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491341692724992:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:21.292714Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:3:46: Warning: At lambda, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query [GOOD] Test command err: 2026-02-19T08:28:32.982072Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490379932529041:2267];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:32.982135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:33.057835Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:28:33.293838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:33.320492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:33.320613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:33.378861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:33.437043Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:33.445919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490379932528801:2081] 1771489712949820 != 1771489712949823 2026-02-19T08:28:33.487989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:33.623902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f3b/r3tmp/yandexRajeUX.tmp 2026-02-19T08:28:33.623920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f3b/r3tmp/yandexRajeUX.tmp 2026-02-19T08:28:33.624066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f3b/r3tmp/yandexRajeUX.tmp 2026-02-19T08:28:33.624138Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:33.662860Z INFO: TTestServer started on Port 13653 GrpcPort 22316 PQClient connected to localhost:22316 2026-02-19T08:28:33.953074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:33.974520Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:28:33.975489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-02-19T08:28:34.038676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:34.182949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:28:34.202153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-02-19T08:28:36.470945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490397112399023:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.471356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490397112399042:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.471536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.472486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490397112399047:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.472580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:36.475628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:36.487705Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490397112399046:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:28:36.797263Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490397112399112:2645] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:36.827999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.866826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.960463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.970465Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490397112399120:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:28:36.972824Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=OTUxYWExMzUtZDQ2NGFjMDUtN2RmZWMwZDItZjVhYzA0ZGM=, ActorId: [1:7608490397112399004:2333], ActorState: ExecuteState, LegacyTraceId: 01khtg7t7kefknsehffkt5x8yt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:28:36.974894Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608490401407366693:2823] 2026-02-19T08:28:37.981831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490379932529041:2267];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:37.981900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-02-19T08:28:43.151311Z :WriteToTopic_Demo_20_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-02-19T08:28:43.162866Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:28:43.177204Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608490427177170687:2927] connected; active server actors: 1 2026-02-19T08:28:43.177778Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-02-19T08:28:43.180392Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:765: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-02-19T08:28:43.180548Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:145: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-02-19T08:28:43.206670Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:28:43.207621Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 ... 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.750440Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:32:23.755961Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:23.756004Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.756025Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:23.756044Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.756059Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:32:23.850629Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:23.850668Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.850688Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:23.850709Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.850724Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:32:23.856260Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:23.856287Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.856301Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:23.856316Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.856331Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:32:23.913435Z :INFO: [/Root] [/Root] [5a427129-92f31889-193d8a57-164d5fdf] Closing read session. 
Close timeout: 0.000000s 2026-02-19T08:32:23.913494Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:9:10 2026-02-19T08:32:23.913535Z :INFO: [/Root] [/Root] [5a427129-92f31889-193d8a57-164d5fdf] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 1000000 MessagesRead: 10 BytesReadCompressed: 1000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-02-19T08:32:23.913613Z :NOTICE: [/Root] [/Root] [5a427129-92f31889-193d8a57-164d5fdf] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-02-19T08:32:23.913650Z :DEBUG: [/Root] [/Root] [5a427129-92f31889-193d8a57-164d5fdf] [] Abort session to cluster 2026-02-19T08:32:23.914230Z :NOTICE: [/Root] [/Root] [5a427129-92f31889-193d8a57-164d5fdf] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-02-19T08:32:23.914849Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2026-02-19T08:32:23.915021Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0] PartitionId [0] Generation [2] Write session will now close 2026-02-19T08:32:23.915092Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0] PartitionId [0] Generation [2] Write session: aborting 2026-02-19T08:32:23.915325Z node 15 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_15_1_11761087544329341904_v1 grpc read done: success# 0, data# { } 2026-02-19T08:32:23.915365Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_15_1_11761087544329341904_v1 grpc read failed 2026-02-19T08:32:23.915412Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:1684: session cookie 1 consumer test-consumer session test-consumer_15_1_11761087544329341904_v1 closed 2026-02-19T08:32:23.915601Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-02-19T08:32:23.915651Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0] PartitionId [0] Generation [2] Write session: destroy 2026-02-19T08:32:23.915948Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_15_1_11761087544329341904_v1 is DEAD 2026-02-19T08:32:23.916217Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037894] Destroy direct read session test-consumer_15_1_11761087544329341904_v1 2026-02-19T08:32:23.916284Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608491363571398591:2518] destroyed 2026-02-19T08:32:23.916343Z node 15 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_15_1_11761087544329341904_v1 2026-02-19T08:32:23.916396Z node 15 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [15:7608491363571398588:2515] disconnected. 
2026-02-19T08:32:23.916429Z node 15 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [15:7608491363571398588:2515] disconnected; active server actors: 1 2026-02-19T08:32:23.916452Z node 15 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [15:7608491363571398588:2515] client test-consumer disconnected session test-consumer_15_1_11761087544329341904_v1 2026-02-19T08:32:23.917935Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0 grpc read done: success: 0 data: 2026-02-19T08:32:23.917971Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0 grpc read failed 2026-02-19T08:32:23.918015Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 5 sessionId: test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0 2026-02-19T08:32:23.918031Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|7c64276b-fb999666-ec8ef9f6-18ba7ead_0 is DEAD 2026-02-19T08:32:23.918378Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:32:23.918570Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608491359276431248:2498] destroyed 2026-02-19T08:32:23.918612Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 2026-02-19T08:32:23.918651Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:23.918666Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.918680Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:23.918699Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.918713Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:32:23.951003Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:23.951038Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.951067Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:23.951087Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.951101Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:32:23.956619Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:23.956654Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.956673Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:23.956707Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:23.956722Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:32:24.017041Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:32:24.017074Z node 15 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:24.051378Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:24.051421Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.051441Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:24.051463Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.051480Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:32:24.056974Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:24.057013Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.057032Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:24.057052Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.057067Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist |95.3%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTxDataShardUploadRows::RetryUploadRowsToShard [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryExplain [GOOD] Test command err: 2026-02-19T08:31:45.025806Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491209266962763:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.025870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:45.062224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:45.334498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:45.334605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:45.367565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:45.385833Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491204971995422:2081] 1771489905023627 != 1771489905023630 2026-02-19T08:31:45.391683Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:45.399647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:45.555530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:45.581430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:45.581451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:45.581456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:45.581559Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:46.024129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:46.031976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:46.038209Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:46.079046Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.264789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.430443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.502384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.307748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222151866477:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.307860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.308289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222151866487:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.308350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.622018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.661261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.703400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.732950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.764699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.814322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.853271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.923187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:48.997923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222151867351:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.998020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.998399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222151867356:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.998436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491222151867357:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:48.998564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.003094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:49.014032Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491222151867360:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:49.083493Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491226446834709:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:50.029966Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491209266962763:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:50.030051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.502025Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.652454Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.720618Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:18.842084Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:21.567124Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491362449856626:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.567241Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.567523Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491362449856635:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.567576Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.645993Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:21.675790Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:21.710973Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:21.749274Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:21.783149Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:21.816159Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:21.850384Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:21.894384Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:21.964480Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491362449857507:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.964600Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.964699Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491362449857512:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.964925Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491362449857514:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.964997Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:21.968461Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:21.979133Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491362449857515:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:22.065740Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491366744824863:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:22.756285Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491345269985610:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:22.756374Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:45" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 '('('"ItemsLimit" $3) '('"Sequential" '1) '('"PointPrefixLen" '1))) (let $5 (Uint32 '1)) (let $6 (KqpRowsSourceSettings $1 $2 $4 '((KqlKeyExc $5 (String '"Name")) (KqlKeyInc $5)))) (let $7 (OptionalType (DataType 'String))) (let $8 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $7) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $7))) (let $9 '('('"_logical_id" '715) '('"_id" '"61435c3a-4d80c314-123078ae-1ad168c5") '('"_wide_channels" $8))) (let $10 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $6)) (lambda '($14) (block '( (let $15 (lambda '($16) (Member $16 '"Amount") (Member $16 '"Comment") (Member $16 '"Group") (Member $16 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $14) $3) $15))) ))) $9)) (let $11 (DqCnUnionAll (TDqOutput $10 '"0"))) (let $12 (DqPhyStage '($11) (lambda '($17) (FromFlow (NarrowMap (Take (ToFlow $17) $3) (lambda '($18 $19 $20 $21) (AsStruct '('"Amount" $18) '('"Comment" $19) '('"Group" $20) '('"Name" $21)))))) '('('"_logical_id" '728) '('"_id" '"8f4b218b-7b78043e-1e5c038a-5c955152")))) (let $13 (DqCnResult (TDqOutput $12 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($10 $12) '($13) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $8) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, 
+∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |95.3%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> TSchemeShardTest::CreateSystemColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] Test command err: 2026-02-19T08:28:32.366235Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490379635639507:2170];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:32.366446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:28:32.427096Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:28:32.640772Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:28:32.814083Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490379635639366:2081] 1771489712337167 != 1771489712337170 2026-02-19T08:28:32.831228Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:28:32.839982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:28:32.840088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:28:32.845787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:28:32.863799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:28:33.103703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/002f3c/r3tmp/yandex1rZLfh.tmp 
2026-02-19T08:28:33.103723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/002f3c/r3tmp/yandex1rZLfh.tmp 2026-02-19T08:28:33.103867Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/002f3c/r3tmp/yandex1rZLfh.tmp 2026-02-19T08:28:33.103932Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:28:33.144114Z INFO: TTestServer started on Port 11649 GrpcPort 23917 2026-02-19T08:28:33.377868Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:23917 2026-02-19T08:28:33.440879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:28:33.522735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:33.662643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-02-19T08:28:35.768304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392520542279:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.768450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.768871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392520542314:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.783544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392520542339:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.786369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.787485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:28:35.788368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392520542345:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.788531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.797455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490392520542350:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.797531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:28:35.807277Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490392520542316:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-02-19T08:28:36.016404Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490392520542387:2649] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:28:36.061509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.107214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.198992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:28:36.210864Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608490396815509692:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:28:36.213465Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=YWNkMDI3ZDQtODMxYjlmMzctNGEwOTc0MGUtY2ExMzkzNTE=, ActorId: [1:7608490392520542274:2333], ActorState: ExecuteState, LegacyTraceId: 01khtg7sg378w27z4mbwrt3e49, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:28:36.216163Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7608490396815509986:2830] 2026-02-19T08:28:37.362058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490379635639507:2170];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:28:37.362153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-02-19T08:28:42.413290Z :Sinks_Oltp_WriteToTopic_1_Table INFO: TTopicSdkTestSetup started 2026-02-19T08:28:42.426000Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-02-19T08:28:42.443743Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7608490422585313985:2935] connected; active server actors: 1 2026-02-19T08:28:42.444404Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: r ... pipe [15:7608491361979639954:2582] client test-consumer disconnected session test-consumer_15_2_13090148889685938520_v1 2026-02-19T08:32:24.001796Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2026-02-19T08:32:24.001846Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0] PartitionId [0] Generation [1] Write session will now close 2026-02-19T08:32:24.001900Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0] PartitionId [0] Generation [1] Write session: aborting 2026-02-19T08:32:24.002482Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:32:24.002521Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0] PartitionId [0] Generation [1] Write session: destroy 2026-02-19T08:32:24.002590Z node 15 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_15_1_2912338148002281529_v1 grpc read done: success# 0, data# { } 2026-02-19T08:32:24.002614Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_15_1_2912338148002281529_v1 grpc read failed 2026-02-19T08:32:24.002636Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_15_1_2912338148002281529_v1 grpc closed 2026-02-19T08:32:24.002671Z node 15 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_15_1_2912338148002281529_v1 is DEAD 2026-02-19T08:32:24.002830Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2324: [PQ: 72075186224037894] Destroy direct read session test-consumer_15_1_2912338148002281529_v1 2026-02-19T08:32:24.002865Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608491353389705290:2554] destroyed 2026-02-19T08:32:24.002903Z node 15 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_15_1_2912338148002281529_v1 2026-02-19T08:32:24.002988Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-02-19T08:32:24.003018Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0] PartitionId [0] Generation [1] Write session will now close 2026-02-19T08:32:24.003043Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0] PartitionId [0] Generation [1] Write session: aborting 2026-02-19T08:32:24.003134Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-02-19T08:32:24.003155Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0] PartitionId [0] Generation [1] Write session: destroy 2026-02-19T08:32:24.003146Z node 15 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [15:7608491353389705287:2551] disconnected. 
2026-02-19T08:32:24.003174Z node 15 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [15:7608491353389705287:2551] disconnected; active server actors: 1 2026-02-19T08:32:24.003190Z node 15 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [15:7608491353389705287:2551] client test-consumer disconnected session test-consumer_15_1_2912338148002281529_v1 2026-02-19T08:32:24.005048Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0 grpc read done: success: 0 data: 2026-02-19T08:32:24.005074Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0 grpc read failed 2026-02-19T08:32:24.005409Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 7 sessionId: test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0 2026-02-19T08:32:24.005434Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|8f58aee2-504f32b0-308d402d-5ff2cd0e_0 is DEAD 2026-02-19T08:32:24.005808Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:32:24.005864Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:32:24.005977Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037896] server disconnected, pipe [15:7608491353389705188:2532] destroyed 2026-02-19T08:32:24.006008Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037896] server disconnected, pipe [15:7608491353389705185:2532] destroyed 2026-02-19T08:32:24.006038Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:32:24.006075Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:24.006097Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.006116Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:24.006137Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.006152Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:32:24.006647Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0 grpc read done: success: 0 data: 2026-02-19T08:32:24.006676Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0 grpc read failed 2026-02-19T08:32:24.006707Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:837: session v1 closed cookie: 5 sessionId: test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0 2026-02-19T08:32:24.006719Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|e240c641-388c90e7-ebd8b913-47199eea_0 is DEAD 2026-02-19T08:32:24.006958Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:32:24.006994Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-02-19T08:32:24.007080Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608491353389705143:2524] destroyed 2026-02-19T08:32:24.007104Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72075186224037894] server disconnected, pipe [15:7608491353389705140:2524] destroyed 2026-02-19T08:32:24.007124Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-02-19T08:32:24.007141Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:24.007153Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.007164Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:24.007177Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.007189Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:32:24.015595Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:24.015646Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.015664Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:24.015699Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.015714Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:32:24.019452Z node 15 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:24.085287Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:24.085335Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.085353Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:24.085375Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.085392Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:32:24.096059Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:24.096106Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.096124Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:24.096164Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.096182Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037896][Partition][0][StateIdle] Try persist 2026-02-19T08:32:24.116028Z node 15 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:24.116073Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.116092Z node 15 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:24.116119Z node 15 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:24.116140Z node 15 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false [GOOD] >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable >> TestSqsTopicHttpProxy::TestGetQueueAttributesStd [GOOD] >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityInvalid [GOOD] >> TestSqsTopicHttpProxy::TestSendMessage [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchingDisabled-Size0-1 >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> TSchemeShardTTLTestsWithReboots::AlterTable >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize1-3 >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> TSchemeShardTTLTests::DynamicBatchingToggle >> TestSqsTopicHttpProxy::TestGetQueueAttributesStd3Consumers >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTestsWithReboots::MoveTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:56.615549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:56.615638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:56.615699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:56.615744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:56.615785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:56.615817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:56.615854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:56.615886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:56.615943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:56.616670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:56.617510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:56.620064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:56.692037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:56.692106Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:56.703029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:56.703636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:56.704595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:56.714894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:56.716510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:56.718842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:56.721315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:56.726123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:56.727780Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:56.737261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:56.737329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:56.737445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:56.737489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:56.737588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:56.737759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:56.746392Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:245:2058] recipient: [1:15:2062] 2026-02-19T08:31:56.864903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:31:56.866251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:56.867189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-02-19T08:31:56.867260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:31:56.870641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:31:56.870749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:56.874061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:56.874240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:31:56.875122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:56.875185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:31:56.875277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:31:56.875320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:31:56.877658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:56.877746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:31:56.877791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:31:56.879583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:56.879624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:31:56.879675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:31:56.879731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:31:56.883909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:31:56.885620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:31:56.886817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-02-19T08:31:56.888803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:56.888926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:31:56.888973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, 
at tablet# 72057594046678944 2026-02-19T08:31:56.889267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:31:56.889315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:31:56.889466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-02-19T08:31:56.889543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: sch ... wnerId: 72057594046678944, LocalPathId: 39] was 0 2026-02-19T08:32:26.635425Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-02-19T08:32:26.635465Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 103:0 type: TxCopyTable target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:32:26.635519Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:32:26.635646Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-02-19T08:32:26.635818Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:32:26.636296Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:26.636360Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:32:26.638268Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2026-02-19T08:32:26.638529Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2026-02-19T08:32:26.638811Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:26.638869Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:26.639055Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-02-19T08:32:26.639161Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:26.639212Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-02-19T08:32:26.639307Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:212:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-02-19T08:32:26.639656Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:26.639729Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2026-02-19T08:32:26.639969Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:361: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-02-19T08:32:26.640752Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:26.640865Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:26.640915Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:26.640961Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-02-19T08:32:26.641015Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-02-19T08:32:26.641775Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:26.641840Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:26.641860Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:26.641884Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 1 2026-02-19T08:32:26.641907Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:32:26.641964Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:32:26.644537Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2026-02-19T08:32:26.644744Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2026-02-19T08:32:26.644813Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1858: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2026-02-19T08:32:26.645359Z node 17 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-02-19T08:32:26.645644Z node 17 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2026-02-19T08:32:26.646079Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6391: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-02-19T08:32:26.646155Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1872: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2026-02-19T08:32:26.646346Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-02-19T08:32:26.646423Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:179: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-02-19T08:32:26.646550Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:182: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-02-19T08:32:26.646694Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 CreateParts -> ConfigureParts 2026-02-19T08:32:26.647837Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:26.648845Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:26.652143Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:26.652558Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:26.652636Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2026-02-19T08:32:26.652728Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2026-02-19T08:32:26.657453Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-02-19T08:32:26.657617Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-02-19T08:32:26.657701Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2026-02-19T08:32:26.657754Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 >> TSchemeShardTTLTests::MultipleTablesConditionalErase-EnableConditionalEraseResponseBatching-false >> TSchemeShardTTLTestsWithReboots::CreateTable >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityBasic >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] >> TBackupCollectionTests::IncrementalBackupOperation [GOOD] >> TBackupCollectionTests::EmptyIncrementalBackupRace >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> TSchemeShardTTLTests::MultipleTablesConditionalErase-EnableConditionalEraseResponseBatching-true >> KqpQuery::MixedCreateAsSelect [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:57.154459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:57.154523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.154563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:57.154597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:57.154632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:57.154659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:57.154679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:57.154712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:57.154758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.154808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:57.155509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:57.155784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:57.240405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:57.240460Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:57.257846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:57.258704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:57.258912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:57.269529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:57.270257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:57.271072Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:57.271349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:57.274239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:57.274434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:57.275585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:57.275650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:57.275753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:57.275797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:57.275854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:57.276057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:57.440904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.441953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:31:57.442293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.442906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.443000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... blishToSchemeBoard Send, to populator: [16:214:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:32:28.574249Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [16:214:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:32:28.574778Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.574874Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:32:28.576258Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:28.576400Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:28.576461Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:28.576524Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-02-19T08:32:28.576585Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:28.577341Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1741 } } CommitVersion { Step: 5000039 TxId: 102 } 
2026-02-19T08:32:28.577378Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:28.577491Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1741 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:28.577645Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1741 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-02-19T08:32:28.577925Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:28.577987Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:28.578012Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:28.578035Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:32:28.578063Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:28.578131Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:32:28.578964Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 747 RawX2: 68719479467 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:28.579037Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:28.579229Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 747 RawX2: 68719479467 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:28.579358Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 
2026-02-19T08:32:28.579539Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 747 RawX2: 68719479467 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:28.579656Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.579726Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.579794Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:28.579869Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:32:28.588445Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:28.588597Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.588757Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:28.588857Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.589223Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.589291Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:32:28.589524Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:28.590463Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:28.590571Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:28.590637Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:28.590722Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:32:28.590842Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:713:2701] message: TxId: 102 2026-02-19T08:32:28.590933Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:28.591013Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:32:28.591068Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:32:28.591283Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:28.594742Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:28.594830Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [16:775:2751] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-02-19T08:32:28.598256Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" PartitionConfig { ChannelProfileId: 0 } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:28.598544Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.598933Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, at schemeshard: 72057594046678944 2026-02-19T08:32:28.601351Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Setting ChannelProfileId to 0 for tables with storage config is not allowed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:28.601639Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTable |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> TSchemeShardTTLTests::ConditionalErase-EnableConditionalEraseResponseBatching-false >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] >> TSchemeShardTTLTests::ConditionalErase-EnableConditionalEraseResponseBatching-true >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase |95.3%| [TA] $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.639193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.639298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.639420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.639463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.639506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.639537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.639563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.639640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.640585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.643160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.731449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.731527Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.743540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.744068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.744271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.752305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.752885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.754318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.757000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.764311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.765479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.937029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.937936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... O: schemeshard_impl.cpp:2707: Change state for txid 101:2 ProposedWaitParts -> Done 2026-02-19T08:32:29.776269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:29.776292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2026-02-19T08:32:29.776369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:29.776394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:29.776429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:29.776497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:29.776526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 
72057594046678944 2026-02-19T08:32:29.776561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:32:29.776590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:32:29.778862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:29.778915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:29.780808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:29.780873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:29.780959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-02-19T08:32:29.781037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.781111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-02-19T08:32:29.781285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.781399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2026-02-19T08:32:29.781429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:2 ProgressState 2026-02-19T08:32:29.781503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-02-19T08:32:29.781526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-02-19T08:32:29.781551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-02-19T08:32:29.781573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-02-19T08:32:29.781619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2026-02-19T08:32:29.781945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.781981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:29.782024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-02-19T08:32:29.782039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: 
TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:29.782073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-02-19T08:32:29.782093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:29.782111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2026-02-19T08:32:29.782149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:781:2746] message: TxId: 101 2026-02-19T08:32:29.782181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:29.782220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:29.782249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:29.782349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:29.782380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-02-19T08:32:29.782392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:1 2026-02-19T08:32:29.782423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:32:29.782438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-02-19T08:32:29.782449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:2 2026-02-19T08:32:29.782473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:32:29.784060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:29.784103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:782:2747] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:29.784481Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:29.784705Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 262us result status StatusSuccess 2026-02-19T08:32:29.785124Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable 
CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.642089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.642179Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.642247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.642304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.642361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.642412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.642443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.642472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.642556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.642648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.643561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.643840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.735657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.735726Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.750538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.754028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.754243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.765312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.766068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.766816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.767081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.770022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2026-02-19T08:32:28.770200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.932556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:32:28.934000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:29.508124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:29.508240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:32:29.508369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.508416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:32:29.508448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:32:29.508481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:32:29.509865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.509921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:32:29.509960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:32:29.511220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.511282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.511320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:29.511355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:32:29.511463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:29.512651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:32:29.512790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:32:29.513079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:29.513161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:29.513193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:29.513405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:32:29.513444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:29.513563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:29.513616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:32:29.515325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:29.515368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:29.515535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:29.515581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:32:29.515882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.515933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:32:29.516033Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:29.516070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:29.516123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:29.516164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:29.516217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:32:29.516263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:29.516299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:32:29.516331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:32:29.516410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:29.516452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:32:29.516486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:32:29.516999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:29.517098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:29.517140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:32:29.517181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:32:29.517220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:29.517331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:32:29.520257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:32:29.520723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:32:29.525923Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: 
actor# [1:675:2663] Bootstrap 2026-02-19T08:32:29.527069Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:675:2663] Become StateWork (SchemeCache [1:680:2668]) 2026-02-19T08:32:29.529059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:29.529366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:574: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.531033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2026-02-19T08:32:29.531750Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:675:2663] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:32:29.534678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:29.534903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-02-19T08:32:29.535389Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TSchemeShardTTLTests::MultipleTablesConditionalErase-EnableConditionalEraseResponseBatching-false [GOOD] >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchingDisabled-Size0-1 [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize1-3 [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.639229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.639441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.639600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.639652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.639699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.639746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.639791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.639883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.640900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.641724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.731461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.731534Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.748063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.749258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.749450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.761917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.762615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.763329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.763596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.767332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.767498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.949725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.950749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.950899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... rontStep: 5000039 2026-02-19T08:32:29.767566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.767633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:32:29.768063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:29.768147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:29.768182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:29.768219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:32:29.768289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:29.768371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:32:29.770776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:29.782829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1055 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:29.782887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:29.783058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 
4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1055 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:29.783204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1055 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:32:29.784104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.784152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:29.784299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.784360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:29.784471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.784546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:29.784584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.784636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:29.784683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:32:29.786361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.787220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.787517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.787563Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:32:29.787661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:29.787697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.787735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:29.787765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.787800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:32:29.787858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2713] message: TxId: 102 2026-02-19T08:32:29.787924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.787972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:32:29.788016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:32:29.788152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:29.789549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:29.789596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:798:2766] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:29.790139Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:29.790376Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 248us result status StatusSuccess 2026-02-19T08:32:29.790897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false 
} KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:57.646435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:57.646531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.646594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:57.646637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:57.646670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:57.646702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:57.646728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:57.646754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:57.646822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.646898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:57.647725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:57.648017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:57.740641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:57.740693Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:57.752188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:57.752762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:57.752937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:57.764842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:57.765547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:57.766255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:57.766556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:57.769549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:57.769776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:57.770939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:57.771004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:57.771106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:57.771149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-02-19T08:31:57.771205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:57.771383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:57.935169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:31:57.936751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.936976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.937050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.937131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:57.937218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
046 ParentPathId: 80 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 85 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:28.968196Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:28.968522Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" took 360us result status StatusSuccess 2026-02-19T08:32:28.969089Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 
Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 91 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:28.970443Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:28.970722Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" took 332us result status StatusSuccess 2026-02-19T08:32:28.971236Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 
CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 86 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:28.972485Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:28.972811Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" took 343us result status StatusSuccess 2026-02-19T08:32:28.973357Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 94 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.639176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.639293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.639417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.639464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.639511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.639550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.639580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.639678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.640667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.643083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.721091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.721150Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.735924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.736810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.738582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.750194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.752008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.754223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.756841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.763984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.765186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.937129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.937974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.938928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
tion_common.cpp:1124: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:32:29.838503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:29.838578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:29.838605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:29.838629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:32:29.838671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:32:29.838729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:32:29.840416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:29.852174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 931 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:29.852209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-02-19T08:32:29.852328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 931 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:29.852446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 931 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:32:29.853202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 729 
RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.853248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-02-19T08:32:29.853377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.853429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:29.853497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.853580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:29.853621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.853653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:32:29.853696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:32:29.855863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.855993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.856212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.856262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:32:29.856358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:29.856407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.856456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:29.856490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.856521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:32:29.856572Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:781:2746] message: TxId: 102 2026-02-19T08:32:29.856615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.856648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:32:29.856671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:32:29.856775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:29.858280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:29.858324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:865:2822] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:29.858673Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:29.858908Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 201us result status StatusSuccess 2026-02-19T08:32:29.859345Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTTLTests::DynamicBatchingToggle [GOOD] >> TSchemeShardTTLTests::MultipleTablesConditionalErase-EnableConditionalEraseResponseBatching-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.640971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.641044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.641115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.641157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.641210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.641249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.641285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.641314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, 
BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.641412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.641487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.642375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.642631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.737001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.737065Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.748777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.749342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.749523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.760888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.761586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.762284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.762559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.765511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.765674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.941998Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.942931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
mon.cpp:1124: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:32:29.844047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:29.844167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:29.844213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:29.844253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:32:29.844295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-02-19T08:32:29.844367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:32:29.846970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:29.859250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1110 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:29.859322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-02-19T08:32:29.859466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1110 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:29.859564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1110 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:32:29.860130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 724 RawX2: 
4294970000 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.860177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-02-19T08:32:29.860316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 724 RawX2: 4294970000 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.860377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:29.860455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 724 RawX2: 4294970000 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:29.860528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:29.860575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.860610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:32:29.860689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:32:29.863285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.863666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.863917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.863979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:32:29.864077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:29.864118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.864155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:29.864182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.864214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:32:29.864272Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:777:2742] message: TxId: 102 2026-02-19T08:32:29.864310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:29.864363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:32:29.864393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:32:29.864493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:29.865981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:29.866024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:910:2831] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:29.866475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:29.866754Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 256us result status StatusSuccess 2026-02-19T08:32:29.867294Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.639226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.639334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.639439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.639472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.639519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.639552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.639581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.639657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.640566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.641697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.726952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.727017Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.739653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.742004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.742207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.750680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.752004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.754194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.756839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.764033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.765168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.954521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.955564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.955722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.955833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.955897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.955961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.956029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.956121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:32:28.956216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.956306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.956384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.956457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.956537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.956625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
de 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.060239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:30.060293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-02-19T08:32:30.060342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710760:0 Propose -> Done 2026-02-19T08:32:30.061889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.061934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-02-19T08:32:30.061999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-02-19T08:32:30.062038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:30.062070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-02-19T08:32:30.062091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:30.062117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-02-19T08:32:30.062159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:129:2153] message: TxId: 281474976710760 2026-02-19T08:32:30.062199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:30.062230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-02-19T08:32:30.062267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710760:0 2026-02-19T08:32:30.062319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-02-19T08:32:30.063775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-02-19T08:32:30.063822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710760 2026-02-19T08:32:30.063873Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-02-19T08:32:30.063957Z node 1 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3024: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:789:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-02-19T08:32:30.065267Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-02-19T08:32:30.065375Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:789:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:32:30.065427Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:32:30.066707Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-02-19T08:32:30.066837Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: 
[1:789:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:32:30.066874Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-02-19T08:32:30.066996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:30.067032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:878:2835] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:30.067547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:30.067824Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 316us result status StatusSuccess 2026-02-19T08:32:30.068309Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 
1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchingDisabled-Size0-1 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.639168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.639304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.639461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.639509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.639570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.639603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.639630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 0, BatchMaxTime 0.100000s 2026-02-19T08:32:28.639710Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.640725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.641704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.738961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.739024Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.751438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.752035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.752214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.763750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.764476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.765223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.765514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.771411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.772678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.772743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.772907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.772967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.773018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.950503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.951901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.952692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
HARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:30.497418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:30.497508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.043000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.043000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:30.497644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:30.497701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:30.497865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600448460043000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:30.498316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.512721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.512832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.514830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.514986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:30.515076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T18:01:00.046000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:30.515159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:30.515739Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:30.516040Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 279us 
result status StatusSuccess 2026-02-19T08:32:30.516961Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:30.522860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:30.523087Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 276us result status StatusSuccess 2026-02-19T08:32:30.523585Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::MultipleTablesConditionalErase-EnableConditionalEraseResponseBatching-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.679912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.679991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.680052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.680109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.680158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.680207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.680253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.680281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 1, BatchMaxTime 0.100000s 2026-02-19T08:32:28.680335Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.680406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.681222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.681508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.772923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.772971Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.785381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.790039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.790222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.799754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.800258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.800730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.800930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.803055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.803186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.803952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.803993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.804081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.804116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.804150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.804265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.964900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.966831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
30.019576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:32:30.019634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:30.020374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:30.020471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:30.020500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:30.020526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:32:30.020558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:30.020626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:32:30.021434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1410 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:30.021476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:30.021624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1410 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:30.023447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1410 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-02-19T08:32:30.024151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle 
TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:30.024209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:30.024455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:30.024520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:30.024610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:30.024690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.024743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.024778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.024855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:32:30.028725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:30.030226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:30.030344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.030469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.030802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.030854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:30.030958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:30.031012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2026-02-19T08:32:30.031066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:30.031125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:30.031164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:32:30.031235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2713] message: TxId: 101 2026-02-19T08:32:30.031305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:30.031347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:30.031403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:30.031538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:30.036895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:30.036958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:738:2714] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:30.347506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:30.347623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:30.347734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableMulti, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.031000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.031000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:30.347844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableMulti, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:30.347930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:30.348721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600452060031000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:30.349311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.364273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: 
TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.364321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.365843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.365978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:30.366047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLTableMulti, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T18:01:00.034000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:30.366083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::MixedCreateAsSelect [GOOD] Test command err: 2026-02-19T08:31:45.763288Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491208527061524:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:45.763489Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:46.053802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:46.083791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:46.083853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:46.145857Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491208527061281:2081] 1771489905708992 != 1771489905708995 2026-02-19T08:31:46.160504Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:46.165679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:46.282199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:46.282219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:46.282225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:46.282276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:46.353775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:46.738696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:46.754119Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:46.804706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:46.930521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.081477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.148028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.042641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491225706932345:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.042746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.043548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491225706932355:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.043610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.353501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.385643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.418817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.446941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.475379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.512288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.546673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.594955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.675259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491225706933226:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.675356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.675490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491225706933231:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.675629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491225706933233:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.675661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:49.679189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:49.689648Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491225706933235:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:31:49.759966Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491225706933286:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:50.765812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491208527061524:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:50.765903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:52.601193Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491238702445559:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:52.602137Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... Result TxId: 281474976710669 2026-02-19T08:32:26.130846Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.130863Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.130890Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.130901Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.130925Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.130936Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.130954Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.130974Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038004 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.130989Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038006 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131012Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038002 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131019Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131045Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131054Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131074Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131093Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131104Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038015 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131131Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038013 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131132Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038011 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131162Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038009 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131175Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131190Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131213Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131220Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131247Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131248Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131297Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131308Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131342Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131343Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131381Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131385Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131424Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131428Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131459Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131461Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131491Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 
2026-02-19T08:32:26.131498Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.131522Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-02-19T08:32:26.134901Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:26.140865Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:26.214262Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491387147803491:6063] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:26.226023Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:27.155581Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:32:27.164795Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-02-19T08:32:22.673216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:22.775674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:32:22.795009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:32:22.795112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:32:22.796609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:32:23.165904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:23.166054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:23.254674Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489940285097 != 1771489940285101 2026-02-19T08:32:23.266016Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:23.315132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:23.415707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:32:23.729472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:23.743106Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:23.860413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.911154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:32:23.912226Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:32:23.912495Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2770] 2026-02-19T08:32:23.912739Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.923048Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:891:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:32:23.958363Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.958540Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.960943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:32:23.961024Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:32:23.961072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:32:23.962779Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.962935Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.963022Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:907:2770] in generation 1 2026-02-19T08:32:23.973792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.000287Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:32:24.001625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.001780Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:909:2780] 2026-02-19T08:32:24.001839Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:32:24.001879Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:32:24.001914Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:24.002150Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:891:2770], Recipient [1:891:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:32:24.002206Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:32:24.003413Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:32:24.003541Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:32:24.003652Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:24.003695Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:32:24.003852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:32:24.003899Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:32:24.003936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:32:24.003970Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:32:24.004017Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:24.004107Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:894:2771], Recipient [1:891:2770]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:32:24.004164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:32:24.004220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [1:890:2769], 
serverId# [1:894:2771], sessionId# [0:0:0] 2026-02-19T08:32:24.004319Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269549568, Sender [1:404:2403], Recipient [1:894:2771] 2026-02-19T08:32:24.004370Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3222: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-02-19T08:32:24.004467Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:32:24.004927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-02-19T08:32:24.004985Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:32:24.005765Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:32:24.005865Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-02-19T08:32:24.005913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-02-19T08:32:24.005949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-02-19T08:32:24.006028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-02-19T08:32:24.006395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-02-19T08:32:24.006432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-02-19T08:32:24.006488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-02-19T08:32:24.006522Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-02-19T08:32:24.006585Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-02-19T08:32:24.006629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-02-19T08:32:24.006672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-02-19T08:32:24.006707Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2026-02-19T08:32:24.006735Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-02-19T08:32:24.008592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269746185, Sender [1:910:2781], Recipient [1:891:2770]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-02-19T08:32:24.008651Z node 1 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:32:24.019491Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransaction ... 73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:290:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:32:28.908392Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:32:28.908430Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:32:29.147653Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:29.147781Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:29.176054Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1771489946620346 != 1771489946620350 2026-02-19T08:32:29.178921Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:29.227056Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:29.307904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:32:29.580264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:29.593607Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:29.701321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.724052Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:892:2770] 2026-02-19T08:32:29.724294Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:29.771029Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:29.771179Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:29.772776Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:32:29.772862Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:32:29.772923Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:32:29.773246Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:29.773398Z node 2 :TX_DATASHARD 
DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:29.773476Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [2:907:2770] in generation 1 2026-02-19T08:32:29.784165Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:29.784249Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:32:29.784363Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:29.784453Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [2:909:2780] 2026-02-19T08:32:29.784494Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:32:29.784533Z node 2 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:32:29.784573Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:29.784959Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:32:29.785053Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:32:29.785123Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:29.785166Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:32:29.785205Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:32:29.785247Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:29.785619Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:889:2768], serverId# [2:894:2771], sessionId# [0:0:0] 2026-02-19T08:32:29.785772Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:32:29.786002Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:134: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-02-19T08:32:29.786073Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:222: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-02-19T08:32:29.787678Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:32:29.798307Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:32:29.798427Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-02-19T08:32:29.915308Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:923:2788], serverId# 
[2:925:2790], sessionId# [0:0:0] 2026-02-19T08:32:29.916731Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-02-19T08:32:29.916788Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:29.917379Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:29.917433Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:32:29.917483Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:32:29.917781Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:32:29.917918Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:32:29.918336Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:29.918404Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:32:29.918840Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:32:29.919258Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:32:29.920916Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:32:29.920967Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:29.921293Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:32:29.921352Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:29.922702Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:29.922748Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:32:29.922796Z node 2 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:32:29.922856Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:405:2404], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:32:29.922907Z node 2 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:32:29.922986Z node 2 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:29.923548Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:32:29.925193Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:32:29.925263Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:32:29.926007Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:32:29.930987Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [2:959:2816], serverId# [2:960:2817], sessionId# [0:0:0] 2026-02-19T08:32:29.931110Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize1-3 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.639205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.639299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.639414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.639450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.639592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.639623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.639653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 1, BatchMaxTime 0.100000s 2026-02-19T08:32:28.639717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.640571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.641633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.734117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.734169Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.745212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.745897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.746079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.756719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.757397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.758141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.758397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.763899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.765184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.943419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.946775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.946932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.947796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
oExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:30.499589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.043000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.043000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:30.499719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:30.499788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:30.499937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600448460043000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:30.500468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.518328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7171: Conditional erase flush pending response batch (by size), batch size 1/1, batch age 0.000000s/0.100000s, at schemeshard: 72057594046678944 2026-02-19T08:32:30.518423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.518490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.520567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.520709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:30.520796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T18:01:00.046000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:30.520852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:30.521321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:30.521604Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 263us 
result status StatusSuccess 2026-02-19T08:32:30.522578Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:30.528445Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:30.528682Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 283us result status StatusSuccess 2026-02-19T08:32:30.529195Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::DynamicBatchingToggle [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.639180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.639298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.639449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.639490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.639557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.639615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.639651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for 
table) 0, BatchSize 1, BatchMaxTime 0.100000s 2026-02-19T08:32:28.639726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.640728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.644247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.727262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.727330Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.738131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.740576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.740761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.756035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.756698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.757410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.757673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.763864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.765162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.930382Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.933948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.934511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
PathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-02-19T08:32:30.498186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-02-19T08:32:30.498209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-02-19T08:32:30.498230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-02-19T08:32:30.498278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-02-19T08:32:30.498306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-02-19T08:32:30.498329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-02-19T08:32:30.498363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-02-19T08:32:30.498399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-02-19T08:32:30.498456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-02-19T08:32:30.498517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-02-19T08:32:30.498545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-02-19T08:32:30.498568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-02-19T08:32:30.498590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-02-19T08:32:30.498612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-02-19T08:32:30.498641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-02-19T08:32:30.498983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2099: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2026-02-19T08:32:30.499134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2159: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.499272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2217: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.499327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:32:30.499500Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.499665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.499871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-02-19T08:32:30.500319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.500463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.500934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.501034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.501376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.501559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.501625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.501724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.502021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.502105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.502641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.502947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.503771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.503869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.504024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.504095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.504150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.509431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:30.514483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:30.514547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.515440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:30.515499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:30.515547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:30.517647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:823:2786] sender: [1:891:2058] recipient: [1:15:2062] 2026-02-19T08:32:30.684686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 832 rowCount 4 cpuUsage 0.0523 2026-02-19T08:32:30.739934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:32:30.740020Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:30.816820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:30.816932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:30.817037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableDynamic, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 3660.064000s, next: now + 0.000000s, now: 2020-09-18T19:02:00.097000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:30.817141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableDynamic, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:30.817215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:30.817371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600455720097000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:30.818106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.842815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7171: Conditional erase flush pending response batch (by size), batch size 1/1, batch age 0.000000s/0.100000s, at schemeshard: 
72057594046678944 2026-02-19T08:32:30.842891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.842936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.845015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.845153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:30.845235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLTableDynamic, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T19:02:00.100000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:30.845290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::BatchingDoesNotAffectCorrectness-EnableConditionalEraseResponseBatching-true >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize1000-6 >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent [GOOD] >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::MultipleTablesConditionalErase-EnableConditionalEraseResponseBatching-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:29.667511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:29.667580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:29.667641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:29.667686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:29.667725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:29.667764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:29.667807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:29.667826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 1, BatchMaxTime 0.100000s 2026-02-19T08:32:29.667893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:29.667957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:29.668662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:29.668883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:29.737547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:29.737596Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:29.747068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:29.747708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:29.747864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:29.756642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:29.757251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:29.757926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:29.758174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:29.760822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:29.760975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:29.761855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:29.761900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:29.761980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:29.762025Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:29.762071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:29.762206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.884361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.885418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.885579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.885672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.885756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.885989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.886071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.886181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.886258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.886335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.886409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.886477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.886553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.886632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
2814749 ... 2:30.903031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:30.903756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:30.903836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:30.903862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:30.903889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:32:30.903916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:30.903976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:32:30.904758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1403 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:30.904794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:30.904911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1403 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:30.905224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1403 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-02-19T08:32:30.905788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 
2026-02-19T08:32:30.905829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:30.905970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:30.906037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:30.906136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:30.906215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.906250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.906285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:30.906349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:32:30.908809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:30.910098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:30.910197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.910284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.910543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.910601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:30.910694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:30.910740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:30.910782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:30.910841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:30.910881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:32:30.910942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2713] message: TxId: 101 2026-02-19T08:32:30.910984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:30.911032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:30.911069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:30.911203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:30.912879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:30.912930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:738:2714] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:31.171772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:31.171865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:31.171940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableMulti, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.031000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.031000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:31.172011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableMulti, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:31.172058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:31.172190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600452060031000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:31.172690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:31.186194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7171: Conditional erase flush pending response batch (by size), batch size 1/1, batch age 0.000000s/0.100000s, at schemeshard: 72057594046678944 2026-02-19T08:32:31.186273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: 
TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:31.186328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:31.187950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:31.188064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:31.188132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLTableMulti, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T18:01:00.034000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:31.188180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |95.4%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize100-5 >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSchemeShardTTLTests::TtlTiersValidation |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:30.804720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:30.804795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.804873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:30.804914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:30.804955Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:30.804991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:30.805032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:30.805058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:30.805155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.805223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:30.806044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:30.806306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:30.895505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:30.895572Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:30.907145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:30.907800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:30.907971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:30.918777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:30.919460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:30.920143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.920399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:30.923404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.923572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:30.924641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:30.924699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.924799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:30.924845Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:30.924885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:30.925035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:31.063310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.063938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.064514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: 
ETopQueriesByReadBytesOneHour } } TxId: 28147 ... DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 811 RawX2: 4294970072 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:32.290710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-02-19T08:32:32.290819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 811 RawX2: 4294970072 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:32.290874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:32.290968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 811 RawX2: 4294970072 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:32.291062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:32.291127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1094: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-02-19T08:32:32.292838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:32.292987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:32.304866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:32.304902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:32.304987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:32.305038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:32.305106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 
72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:32.305143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:32.305170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:32.305203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:32.305232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:32:32.305251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:32:32.306640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:32.306951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:32.306994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-02-19T08:32:32.307035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-02-19T08:32:32.307061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-02-19T08:32:32.307114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-02-19T08:32:32.307162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Done -> Done 2026-02-19T08:32:32.308478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:32.308517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:32:32.308601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:32.308630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:32.308671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:32.308696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:32.308719Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:32:32.308762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2713] message: TxId: 102 2026-02-19T08:32:32.308806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:32.308845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:32:32.308873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:32:32.308961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:32:32.308989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:32.310274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:32.310330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:841:2798] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:32.310694Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:32.310862Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 178us result status StatusSuccess 2026-02-19T08:32:32.311214Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 
Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchingDisabled-FlagOff-2 >> TSchemeShardTTLTests::AlterTableShouldSuccess >> TBackupCollectionTests::EmptyIncrementalBackupRace [GOOD] >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL >> TSchemeShardTTLTests::CheckCounters >> TestSqsTopicHttpProxy::TestGetQueueAttributesStd3Consumers [GOOD] >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBatch >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize1000-6 [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoDlq3Consumers >> TSchemeShardTTLTests::BatchingDoesNotAffectCorrectness-EnableConditionalEraseResponseBatching-true [GOOD] >> TSchemeShardColumnTableTTL::AlterColumnTable >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:33.239539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:33.239618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.239693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:33.239745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:33.239782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:33.239829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:33.239879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:33.239904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:33.239971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.240032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:33.240893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:33.241176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:33.311808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:33.311872Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:33.319968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:33.320465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:33.320611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:33.328044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:33.328546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:33.329157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.329374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2026-02-19T08:32:33.331905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.332080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:33.332917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:33.332956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.333050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:33.333081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:33.333108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:33.333211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.442098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.442990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.443940Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.444021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:33.928326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:32:33.928462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:33.931422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:32:33.931525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:32:33.931927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.932066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:33.932125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:33.932378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:32:33.932436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:33.932560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:33.932611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:32:33.934261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:33.934312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:33.934462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.934503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:32:33.934810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.934858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:32:33.934943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:33.934980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:33.935016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:33.935039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:33.935066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:32:33.935092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:33.935119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:32:33.935138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:32:33.935191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:33.935230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:32:33.935287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:32:33.935724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:33.935803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:33.935828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:32:33.935852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:32:33.935880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:33.935937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:32:33.939147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:32:33.939572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1771489953.940757 594889 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2026-02-19T08:32:33.941140Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:673:2661] Bootstrap 2026-02-19T08:32:33.942020Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:673:2661] Become StateWork (SchemeCache [1:678:2666]) 2026-02-19T08:32:33.943511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:33.943814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:574: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.944040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2026-02-19T08:32:33.944775Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:673:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:32:33.946878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:33.947018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE 
COLUMN TABLE, path: /MyRoot/ 2026-02-19T08:32:33.947313Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1771489953.947630 594889 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2026-02-19T08:32:33.949130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:33.949360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:574: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.949487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2026-02-19T08:32:33.951045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:33.951162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:32:22.619922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:22.767989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:32:22.795672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:32:22.795731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:32:22.796609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:32:23.165868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:23.166032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:23.250823Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489940285104 != 1771489940285108 2026-02-19T08:32:23.265963Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:23.315344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:23.425502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:32:23.729638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:23.743133Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:23.860513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.910015Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:909:2779] 2026-02-19T08:32:23.910290Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.959964Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.960311Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.962067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:32:23.962158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:32:23.962229Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:32:23.962759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.963047Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:912:2781] 2026-02-19T08:32:23.963246Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.972340Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.972434Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:952:2779] in generation 1 
2026-02-19T08:32:23.973332Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:914:2783] 2026-02-19T08:32:23.973545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.982478Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.982633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.984090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:32:23.984166Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:32:23.984216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:32:23.984488Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.984802Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.984853Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:966:2781] in generation 1 2026-02-19T08:32:23.985883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.985997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.987366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-02-19T08:32:23.987437Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037891 2026-02-19T08:32:23.987493Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037891 2026-02-19T08:32:23.987755Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.987944Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.988011Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037891 persisting started state actor id [1:969:2783] in generation 1 2026-02-19T08:32:23.988315Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:922:2788] 2026-02-19T08:32:23.988501Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.997350Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.997468Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.998867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-02-19T08:32:23.998932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037890 2026-02-19T08:32:23.998980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037890 2026-02-19T08:32:23.999277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.999420Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.999491Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037890 persisting started state actor id [1:975:2788] in generation 1 2026-02-19T08:32:24.010589Z node 
1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.037777Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:32:24.037990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.038098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:980:2821] 2026-02-19T08:32:24.038149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:32:24.038176Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:32:24.038219Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:24.038645Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.038678Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:32:24.038723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.038783Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:981:2822] 2026-02-19T08:32:24.038813Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:32:24.038832Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:32:24.038848Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:32:24.038883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.038901Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-02-19T08:32:24.038932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.039011Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037891, actorId: [1:982:2823] 2026-02-19T08:32:24.039037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037891 2026-02-19T08:32:24.039060Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-02-19T08:32:24.039075Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-02-19T08:32:24.039268Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:32:24.039359Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:32:24.039586Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:24.039620Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:32:24.039663Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at ... 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-02-19T08:32:32.990930Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:32:32.991428Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037889:1][3:939:2800] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ 
Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2026-02-19T08:32:32.991907Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:224: [AsyncIndexChangeSenderMain][72075186224037889:1][3:973:2826] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 
28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] 
BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2026-02-19T08:32:32.992095Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1286: Upload rows: got OK from shard 72075186224037889 description: 2026-02-19T08:32:32.992244Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1416: completed with status SUCCESS >> TSchemeShardTTLTests::ConditionalErase-EnableConditionalEraseResponseBatching-false [GOOD] >> KqpLimits::ReplySizeExceeded [GOOD] >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize100-5 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:56.615589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:56.615691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:56.615763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:56.615831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:56.615888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:56.615946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2026-02-19T08:31:56.615983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:56.616013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:56.616081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:56.616711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:56.617644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:56.620096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:56.710178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:56.710231Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:56.721007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:56.721535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:56.721730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:56.732030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:56.732639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:56.733264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:56.733485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:56.736071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:56.736235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:56.737264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:56.737316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:56.737426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:56.737487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-02-19T08:31:56.737544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:56.737745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:56.905187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:31:56.906625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.906968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:56.907030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
Finished 2026-02-19T08:32:33.378904Z node 12 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-02-19T08:32:33.389887Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:33.390156Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.390385Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-02-19T08:32:33.390447Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-02-19T08:32:33.390660Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-02-19T08:32:33.390740Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:33.393107Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.393310Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:32:33.393537Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.393616Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:32:33.393673Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:32:33.393753Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:32:33.395620Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.395697Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:32:33.395764Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:32:33.397366Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2026-02-19T08:32:33.397416Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.397493Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:33.397562Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:32:33.397741Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:33.399242Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:32:33.399424Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:32:33.399779Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.399915Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 51539609711 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:33.399978Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:33.400274Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:32:33.400348Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:33.400577Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:33.400664Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:33.402413Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:33.402478Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 
1] 2026-02-19T08:32:33.402734Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.402796Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [12:213:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:32:33.403160Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.403221Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:32:33.403396Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:33.403448Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:33.403505Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:33.403557Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:33.403614Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:32:33.403671Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:33.403723Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:32:33.403767Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:32:33.403858Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:33.403911Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:32:33.403959Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:32:33.404615Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:33.404744Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:33.404796Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:32:33.404855Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:32:33.404914Z node 12 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:33.405027Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:32:33.407772Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:32:33.408326Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.411942Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [12:680:2669] Bootstrap 2026-02-19T08:32:33.413528Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [12:680:2669] Become StateWork (SchemeCache [12:686:2675]) 2026-02-19T08:32:33.414579Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [12:680:2669] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:32:33.416709Z node 12 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:33.446789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:33.446879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.446963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:33.447028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:33.447070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:33.447125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:33.447163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:33.447190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:33.447282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.447354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:33.448270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:33.448555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:33.544267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:33.544337Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:33.557079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:33.557768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:33.557972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:33.569598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:33.570314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:33.571137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.571416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:33.574534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.574753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:33.575902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:33.575957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.576090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:33.576139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:33.576184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:33.576357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.736094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.737983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.738058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.738149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.738236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.273946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:32:34.274102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.274144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:32:34.274175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:32:34.274209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:32:34.275683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.275739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:32:34.275790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:32:34.277133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.277239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.277290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:34.277349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:32:34.277466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:34.278762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:32:34.278861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:32:34.279142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.279239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.279291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:34.279580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:32:34.279627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:34.279750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:34.279802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:32:34.281397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.281454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.281627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.281673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:32:34.281943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.281987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:32:34.282061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:34.282093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:34.282122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:34.282160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:34.282190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:32:34.282223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:34.282251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:32:34.282277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:32:34.282335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:34.282364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:32:34.282400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:32:34.282798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:34.282904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:34.282977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:32:34.283021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:32:34.283063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:34.283165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:32:34.285691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:32:34.286138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:32:34.287457Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:675:2663] Bootstrap 2026-02-19T08:32:34.288588Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:675:2663] Become StateWork (SchemeCache [1:680:2668]) 2026-02-19T08:32:34.290959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:34.291306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.291432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" 
Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2026-02-19T08:32:34.291894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2026-02-19T08:32:34.292570Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:675:2663] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:32:34.295623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.295844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-02-19T08:32:34.296319Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:33.144059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:33.144146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.144225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:33.144266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:33.144296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:33.144326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:33.144358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:33.144376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:33.144431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.144477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:33.145111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:33.145333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:33.208934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:33.208982Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:33.218449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:33.219008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:33.219165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:33.228514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:33.229195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:33.229787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.230035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:33.232550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.232702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:33.233582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:33.233627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.233699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:33.233759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:33.233790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:33.233891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.360971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.361568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.361655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.361739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.361791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.361838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.361870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.361920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.361988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.362028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.362062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.362108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.362171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.362223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
l.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:34.052779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:34.052871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:34.052914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:34.052948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:32:34.052976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:34.053059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-02-19T08:32:34.054472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1353 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:34.054516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:34.054663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1353 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:34.054812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1353 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-02-19T08:32:34.056271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:34.056318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, 
TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:34.056452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:34.056513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:34.056607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:34.056685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.056760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.056815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:34.056864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:32:34.057570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:34.058740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:34.060350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.060494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.060751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.060801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:34.060906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:34.060944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:34.060985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:34.061021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:34.061055Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:32:34.061115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2717] message: TxId: 101 2026-02-19T08:32:34.061161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:34.061203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:34.061231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:34.061377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:34.063016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:34.063079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:742:2718] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-02-19T08:32:34.065604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:34.065865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.066195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2026-02-19T08:32:34.068131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.068332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2026-02-19T08:32:34.070683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2026-02-19T08:32:34.070915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.071190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2026-02-19T08:32:34.072885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.073079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize1000-6 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:32.570587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:32.570671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:32.570752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:32.570798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:32.570835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:32.570874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:32.570912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:32.570939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 1000, BatchMaxTime 0.100000s 2026-02-19T08:32:32.571043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-02-19T08:32:32.571116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:32.571979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:32.572266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:32.663565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:32.663635Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:32.675806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:32.676369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:32.676552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:32.687609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:32.688305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:32.689110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:32.689382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:32.692547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:32.692733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:32.693832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:32.693909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:32.694036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:32.694087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:32.694129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:32.694290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:32.844121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.849922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.850985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814 ... 
sion: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.214289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:34.214395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.214517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.043000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.043000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.214629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:34.214698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:34.214849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600448460043000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:34.215426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:34.229414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409546, status: OK, batch size 1/1000, batch age 0.000000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:32:34.230076Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:34.230438Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 336us 
result status StatusSuccess 2026-02-19T08:32:34.231390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.237058Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:34.237308Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 263us result status StatusSuccess 2026-02-19T08:32:34.237815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BatchingDoesNotAffectCorrectness-EnableConditionalEraseResponseBatching-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:32.618758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:32.618848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:32.618926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:32.618965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:32.619001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:32.619051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:32.619092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:32.619116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:32.619168Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:32.619237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:32.620018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:32.620277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:32.706245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:32.706299Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:32.720398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:32.721080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:32.721271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:32.733269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:32.734025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:32.734784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:32.735045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:32.738269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:32.738445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:32.739526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:32.739579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:32.739676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:32.739729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:32.739779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:32.739947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:32.884087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:32.885968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:33.879000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.879043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.879076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:33.879119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:32:33.881732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:33.882207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:33.882715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.882825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.883048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.883091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:33.883190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:33.883228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:33.883276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:33.883307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:33.883390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:32:33.883447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2717] message: TxId: 101 2026-02-19T08:32:33.883489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:33.883534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 
2026-02-19T08:32:33.883568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:33.883699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:33.885281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:33.885326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:742:2718] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:34.261181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:34.261242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.261320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableCorrectness, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.045000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.045000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.261417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableCorrectness, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:34.261457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:34.261560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600448460045000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:34.261942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:34.275089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409546, status: OK, batch size 1/100, batch age 0.000000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:32:34.275575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableCorrectness" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:34.275806Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableCorrectness" took 240us result status StatusSuccess 2026-02-19T08:32:34.276462Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableCorrectness" PathDescription { Self { Name: "TTLTableCorrectness" PathId: 38 SchemeshardId: 72057594046678944 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableCorrectness" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] >> TSchemeShardTTLTestsWithReboots::CopyTable >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] >> TSchemeShardTTLTests::ShouldCheckQuotas >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:32:22.788634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:22.898036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:32:22.919858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:32:22.919934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:32:22.920479Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:32:23.198761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:23.198892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:23.273102Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489940285130 != 1771489940285134 2026-02-19T08:32:23.282134Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:23.328118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:23.425330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:32:23.732364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:23.745452Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:23.860420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.910026Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:909:2779] 2026-02-19T08:32:23.910282Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.960125Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.960415Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.961961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:32:23.962042Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:32:23.962106Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:32:23.962736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.963010Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:912:2781] 2026-02-19T08:32:23.963237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.971899Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.972014Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:952:2779] in generation 1 
2026-02-19T08:32:23.972922Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:914:2783] 2026-02-19T08:32:23.973127Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.981702Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.981879Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.983168Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:32:23.983241Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:32:23.983280Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:32:23.983579Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.983852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.983912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:966:2781] in generation 1 2026-02-19T08:32:23.984938Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.985029Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.986196Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-02-19T08:32:23.986258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037891 2026-02-19T08:32:23.986303Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037891 2026-02-19T08:32:23.986589Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.986747Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.986800Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037891 persisting started state actor id [1:969:2783] in generation 1 2026-02-19T08:32:23.987106Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:922:2788] 2026-02-19T08:32:23.987307Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:23.995521Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:23.995637Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:23.996797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-02-19T08:32:23.996863Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037890 2026-02-19T08:32:23.996912Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037890 2026-02-19T08:32:23.997155Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:23.997257Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:23.997314Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037890 persisting started state actor id [1:975:2788] in generation 1 2026-02-19T08:32:24.008315Z node 
1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.038667Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:32:24.038838Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.038951Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:980:2821] 2026-02-19T08:32:24.038992Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:32:24.039022Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:32:24.039067Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:24.039495Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.039536Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:32:24.039584Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.039633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:981:2822] 2026-02-19T08:32:24.039654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:32:24.039692Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:32:24.039716Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:32:24.039761Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.039782Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-02-19T08:32:24.039822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.039879Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037891, actorId: [1:982:2823] 2026-02-19T08:32:24.039906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037891 2026-02-19T08:32:24.039936Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-02-19T08:32:24.040013Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-02-19T08:32:24.040199Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:32:24.040296Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:32:24.040549Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:24.040588Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:32:24.040629Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at ... 8:32:33.180886Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:33.181274Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:33.181332Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-02-19T08:32:33.181404Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-02-19T08:32:33.181759Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-02-19T08:32:33.181909Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-02-19T08:32:33.182142Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:33.182215Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-02-19T08:32:33.182722Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-02-19T08:32:33.183227Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:32:33.185251Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3778: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-02-19T08:32:33.185310Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:33.186754Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-02-19T08:32:33.186857Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:33.187915Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:33.187970Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:32:33.188025Z node 3 :TX_DATASHARD INFO: datashard.cpp:1307: Change sender activated: at tablet: 72075186224037888 2026-02-19T08:32:33.188094Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:813: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:404:2403], exec latency: 0 ms, propose latency: 0 ms 2026-02-19T08:32:33.188153Z node 3 :TX_DATASHARD INFO: datashard.cpp:1604: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-02-19T08:32:33.188264Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2026-02-19T08:32:33.190248Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:32:33.191473Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2971: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-02-19T08:32:33.191531Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-02-19T08:32:33.192146Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3796: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-02-19T08:32:33.199084Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:33.199166Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:33.199479Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:33.200047Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:33.200223Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:33.203729Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:33.209081Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:32:33.326282Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-02-19T08:32:33.330051Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:32:33.365308Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:33.442830Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# [3:1059:2878], serverId# [3:1060:2879], sessionId# [0:0:0] 2026-02-19T08:32:33.443359Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:2] at 72075186224037888 2026-02-19T08:32:33.443583Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:2] at 72075186224037888, row count=3 2026-02-19T08:32:33.454605Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:33.514964Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:3] at 72075186224037888 2026-02-19T08:32:33.515105Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:3] at 72075186224037888, row count=3 2026-02-19T08:32:33.526269Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:33.596016Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:333: Executing write operation for [0:4] at 72075186224037888 2026-02-19T08:32:33.596204Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:556: Executed write operation for [0:4] at 72075186224037888, row count=3 2026-02-19T08:32:33.607196Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:33.610227Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2026-02-19T08:32:33.621382Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2026-02-19T08:32:33.621458Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:33.623056Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-02-19T08:32:33.623448Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-02-19T08:32:33.623666Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:33.623707Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:32:33.623750Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for WaitForStreamClearance 2026-02-19T08:32:33.623985Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:32:33.624036Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:33.624566Z node 3 
:TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 1 2026-02-19T08:32:33.624903Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-02-19T08:32:33.625039Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715663, PendingAcks: 0 2026-02-19T08:32:33.625082Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 0 2026-02-19T08:32:33.659105Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: FullScan complete at 72075186224037888 2026-02-19T08:32:33.659191Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4498: Found op: cookie: 281474976715663, at: 72075186224037888 2026-02-19T08:32:33.659409Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:33.659460Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-02-19T08:32:33.659504Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for ReadTableScan 2026-02-19T08:32:33.659644Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:32:33.659696Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:33.659734Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase-EnableConditionalEraseResponseBatching-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:30.631305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:30.631393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.631437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:30.631467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:30.631493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 
2026-02-19T08:32:30.631518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:30.631550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:30.631566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 5, BatchMaxTime 0.100000s 2026-02-19T08:32:30.631629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.631679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:30.632283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:30.632514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:30.692933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:30.692985Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:30.705149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:30.705724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:30.705927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:30.716648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:30.717762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:30.718522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.718780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:30.721692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.721880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:30.722908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:30.722961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.723055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:30.723095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain 
is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:30.723142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:30.723326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.846855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.847605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.847699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.847783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.847854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.847891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.847937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.847991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.848070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.848109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.848145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.848176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.848221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.848269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
86233409547, at schemeshard: 72057594046678944 2026-02-19T08:32:34.551390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-02-19T08:32:34.551807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.551866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-02-19T08:32:34.552830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.552874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:32:34.559613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.559666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-02-19T08:32:34.560726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.560770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:34.561247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.561505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.561604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable3, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], shardIdx: 72057594046678944:3, tabletId: 72075186233409548, last: now - 0.003000s, next: now + 3599.997000s, now: 2020-09-18T22:04:00.230000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.561770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.561807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:32:34.562040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.562291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.562360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable4, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], shardIdx: 72057594046678944:4, tabletId: 72075186233409549, last: now - 0.002000s, 
next: now + 3599.998000s, now: 2020-09-18T22:04:00.230000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.564538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.564655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.564728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable5, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], shardIdx: 72057594046678944:5, tabletId: 72075186233409550, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.231000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.564834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.564891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:34.564981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.565025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:34.565049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:34.565103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.565165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable1, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.231000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.565223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:34.565284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.565333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable2, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], shardIdx: 72057594046678944:2, tabletId: 72075186233409547, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.231000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.565364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:34.628255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2026-02-19T08:32:34.628432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 41 shard idx 72057594046678944:4 data size 43 row 
count 1 2026-02-19T08:32:34.628522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], pathId map=TTLEnabledTable4, is column=0, is olap=0, RowCount 1, DataSize 43 2026-02-19T08:32:34.628631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable 2026-02-19T08:32:34.628705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-02-19T08:32:34.628755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable1, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:32:34.628841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-02-19T08:32:34.628880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 0 row count 0 2026-02-19T08:32:34.628916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=TTLEnabledTable2, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:32:34.628961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-02-19T08:32:34.628991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 603 row count 2 2026-02-19T08:32:34.629051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTable3, is column=0, is olap=0, RowCount 2, DataSize 603 2026-02-19T08:32:34.629103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-02-19T08:32:34.629143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 42 shard idx 72057594046678944:5 data size 627 row count 2 2026-02-19T08:32:34.629177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], pathId map=TTLEnabledTable5, is column=0, is olap=0, RowCount 2, DataSize 627, with borrowed parts 2026-02-19T08:32:34.629247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409550: SplitByLoadNotEnabledForTable 2026-02-19T08:32:34.641776Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.641841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2026-02-19T08:32:34.643663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 ... waiting for single conditional erase batch completed (done) 2026-02-19T08:32:34.648988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.649100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable6, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], shardIdx: 72057594046678944:6, tabletId: 72075186233409551, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.233000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.649158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::ConditionalErase-EnableConditionalEraseResponseBatching-true [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchSize100-5 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:33.119308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:33.119407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.119482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:33.119526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:33.119566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:33.119600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:33.119631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:32:33.119655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:33.119737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.119806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:33.121101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:33.121391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:33.205476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:33.205530Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:33.216955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:33.217439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:33.217618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:33.228720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:33.229403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:33.230162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:33.230438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:33.233503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.233689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:33.234885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:33.234955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:33.235083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:33.235131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:33.235171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:33.235339Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:33.368116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.368861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.368970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:33.369580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
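Aside on the conditional-erase request in the excerpt that resumes below: its Expiration { WallClockTimestamp: 1600448460043000 ColumnUnit: UNIT_AUTO } is simply the test's fixed clock (2020-09-18T18:01:00.043000Z) minus the table's ExpireAfterSeconds of 3600, expressed in microseconds. A minimal Python sketch of that arithmetic; the helper name and shape are illustrative only, not YDB code:

from datetime import datetime, timedelta, timezone

def erase_threshold_us(now: datetime, expire_after_seconds: int) -> int:
    # Rows whose TTL column is older than (now - expire_after) are eligible for erase.
    threshold = now - timedelta(seconds=expire_after_seconds)
    epoch = datetime(1970, 1, 1, tzinfo=timezone.utc)
    # timedelta // timedelta gives an exact integer count of microseconds (no float rounding).
    return (threshold - epoch) // timedelta(microseconds=1)

now = datetime(2020, 9, 18, 18, 1, 0, 43000, tzinfo=timezone.utc)  # fixed test clock from the log
print(erase_threshold_us(now, 3600))  # 1600448460043000, matching WallClockTimestamp in the request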
rsion: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.739808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:34.739913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:34.740028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.043000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.043000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:34.740139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:34.740206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:34.740350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600448460043000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:34.740901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:34.759465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409546, status: OK, batch size 1/100, batch age 0.000000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:32:34.760194Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:34.760508Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 330us 
result status StatusSuccess 2026-02-19T08:32:34.761441Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.766911Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:34.767183Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 252us result status StatusSuccess 2026-02-19T08:32:34.767692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:33.960902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:33.960969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.961011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:33.961037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:33.961061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:33.961085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:33.961120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:33.961144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:32:33.961202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:33.961249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:33.961953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:33.962173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:34.022978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:34.023031Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:34.031109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:34.031668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:34.031818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:34.040438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:34.041113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:34.041771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.041995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:34.044490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.044635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:34.045446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.045484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.045550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:34.045582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.045626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:34.045776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.167322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.168862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
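Aside on the TTLSettings shapes appearing in the describe results of this section: the TTLTableBatchSize table above carries an explicit delete tier (Tiers { ApplyAfterSeconds: 3600 Delete { } }), while the LegacyTtlSettingsNoTiers case below keeps the plain legacy form with only ColumnName and ExpireAfterSeconds. An illustrative Python rendering of the two structures, transcribed from the log's text-format output rather than from the real protobuf classes:

legacy_ttl = {  # /MyRoot/TTLEnabledTable, as described later in this output
    "Enabled": {"ColumnName": "modified_at", "ExpireAfterSeconds": 3600},
}
tiered_ttl = {  # /MyRoot/TTLTableBatchSize, as described earlier in this output
    "Enabled": {
        "ColumnName": "ts",
        "ExpireAfterSeconds": 3600,
        "Tiers": [{"ApplyAfterSeconds": 3600, "Delete": {}}],
    },
}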
2026-02-19T08:32:34.870667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:34.871738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:34.871847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:34.871890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:34.871920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:32:34.871947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:34.872017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:32:34.873647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1251 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:34.873691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:34.873840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1251 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:34.873983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1251 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-02-19T08:32:34.875106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 
2026-02-19T08:32:34.875169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:34.875316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:34.875375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:34.875450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 715 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:34.875530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.875582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.875619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:34.875659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:32:34.876792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:34.877761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:34.879277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.879392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.879612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.879667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:34.879759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:34.879790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:34.879827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:34.879856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:34.879889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:32:34.879956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2719] message: TxId: 101 2026-02-19T08:32:34.880002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:34.880042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:34.880072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:34.880203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:34.881756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:34.881803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:744:2720] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:34.882266Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:34.882486Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 220us result status StatusSuccess 2026-02-19T08:32:34.882932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchingDisabled-FlagOff-2 [GOOD] >> TSchemeShardTTLTests::CondEraseOverReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:34.250359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:34.250442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.250530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:34.250594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:34.250637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:34.250676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:34.250710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:34.250740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:34.250814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.250882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:34.251786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:34.252063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:34.330318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:34.330373Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:34.342180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:34.342662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:34.342817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:34.352725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:34.353363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:34.354151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.354454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:34.358042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.358254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:34.359275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.359338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.359548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:34.359594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.359639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:34.359815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.484830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.485814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.485958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:32:34.486444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.486848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
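Aside on the CreateTableShouldFailOnUnspecifiedTTL case whose output continues below: the propose near the end of this excerpt carries an empty TTLSettings { } block, and the test name states that such a request must be rejected. A sketch of the invariant being exercised, written as hypothetical Python rather than the schemeshard's actual validation code:

def ttl_settings_are_specified(ttl_settings: dict) -> bool:
    # An empty TTLSettings block names no column and no expiration, so there is
    # nothing to enable; the schemeshard is expected to refuse such a request.
    enabled = ttl_settings.get("Enabled")
    return bool(enabled and enabled.get("ColumnName"))

assert ttl_settings_are_specified({"Enabled": {"ColumnName": "modified_at", "ExpireAfterSeconds": 3600}})
assert not ttl_settings_are_specified({})  # mirrors the empty TTLSettings { } in the propose below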
omplete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.987904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:32:34.988049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.988098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:32:34.988127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:32:34.988162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:32:34.989622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.989678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:32:34.989742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:32:34.991235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.991295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.991342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:34.991394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:32:34.991550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:34.993024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:32:34.993151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:32:34.993458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.993569Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:34.993617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:34.994030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:32:34.994097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:34.994263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:34.994341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:34.996017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.996065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.996206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.996258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:32:34.996802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.996842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:32:34.996921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:34.996946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:34.996979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:34.997010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:34.997045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:32:34.997079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:34.997116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 
2026-02-19T08:32:34.997154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:32:34.997223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:34.997260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:32:34.997284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:32:34.997679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:34.997806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:34.997843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:32:34.997876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:32:34.997920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:34.997980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:32:35.001130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:32:35.001624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:32:35.003073Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2659] Bootstrap 2026-02-19T08:32:35.004202Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2659] Become StateWork (SchemeCache [1:676:2664]) 2026-02-19T08:32:35.006141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:35.006428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.006535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: 
"TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2026-02-19T08:32:35.006980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2026-02-19T08:32:35.008279Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2659] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:32:35.010649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:35.010866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-02-19T08:32:35.011316Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:34.090153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:34.090246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.090305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:34.090336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:34.090360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:34.090387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:34.090412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:34.090430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:32:34.090480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.090536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:34.091236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:34.091493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:34.158682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:34.158752Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:34.170187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:34.170712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:34.170876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:34.181691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:34.182374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:34.183077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.183337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:34.186195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.186358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:34.187403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.187454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.187550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:34.187598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.187636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:34.187776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.298471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.301951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.302008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.302050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.302107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.302162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
oard.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-02-19T08:32:34.991445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-02-19T08:32:34.991901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.991982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-02-19T08:32:34.993174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:34.993276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:34.993333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:34.993390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-02-19T08:32:34.993435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:34.994696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:34.994787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:32:34.994834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:32:34.994884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:32:34.994913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:34.994983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-02-19T08:32:34.996345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 
ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1183 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:34.996387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:34.996524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1183 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:32:34.996660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1183 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-02-19T08:32:34.998027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:34.998070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:34.998217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:34.998269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:34.998397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:34.998474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.998526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.998584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:34.998629Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:32:34.999268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:35.000482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:35.002014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.002123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.002373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.002421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:35.002515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:35.002602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:35.002638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:35.002666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:35.002705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:32:35.002765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2717] message: TxId: 101 2026-02-19T08:32:35.002809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:35.002844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:35.002876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:35.003013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:35.004996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:35.005043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:742:2718] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-02-19T08:32:35.007457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-02-19T08:32:35.007647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.007972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2026-02-19T08:32:35.009843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:35.010091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:34.089990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:34.090082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.090147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:34.090189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:34.090224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:34.090263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:34.090300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:34.090323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:34.090431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.090510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:34.091320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:34.091586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:34.179065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:34.179142Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:34.190758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:34.191420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:34.191624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:34.202623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:34.203340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:34.204065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.204328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:34.207371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.207547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:34.208624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.208689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.208800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:34.208849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.208891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:34.209057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.322026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.322629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.322710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.322776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.322822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.322863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.322910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.322950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.323017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.323060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.323095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.323147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.323208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.323272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
1:212:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-02-19T08:32:35.139234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.139301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:32:35.139984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:32:35.140071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-02-19T08:32:35.140103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-02-19T08:32:35.140173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:32:35.140206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:35.140275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-02-19T08:32:35.142517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-02-19T08:32:35.154681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } CommitVersion { Step: 5000040 TxId: 104 } 2026-02-19T08:32:35.154743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:35.154911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } CommitVersion { Step: 5000040 TxId: 104 } 2026-02-19T08:32:35.155059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } CommitVersion { 
Step: 5000040 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2026-02-19T08:32:35.155729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-02-19T08:32:35.155834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:35.156004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-02-19T08:32:35.156118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:35.156231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-02-19T08:32:35.156320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:35.156378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.156423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:35.156471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 104:0 ProposedWaitParts -> Done 2026-02-19T08:32:35.158650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.159631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.159903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.159970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 104:0 ProgressState 2026-02-19T08:32:35.160069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:32:35.160107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:32:35.160144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-02-19T08:32:35.160178Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:32:35.160217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-02-19T08:32:35.160275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2717] message: TxId: 104 2026-02-19T08:32:35.160346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-02-19T08:32:35.160388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-02-19T08:32:35.160418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 104:0 2026-02-19T08:32:35.160524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:35.162202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:32:35.162250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:850:2818] TestWaitNotification: OK eventTxId 104 2026-02-19T08:32:35.162690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:35.162865Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 199us result status StatusSuccess 2026-02-19T08:32:35.163188Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex [GOOD] >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: 2026-02-19T08:31:42.200299Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491195840450900:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:42.200360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:42.241299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:42.532344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:42.532531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:42.536933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:42.595188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:42.654251Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:42.656117Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491195840450870:2081] 1771489902198621 != 1771489902198624 2026-02-19T08:31:42.749983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:42.750005Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:42.750013Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:42.750156Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:42.810497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:43.206820Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:43.243791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:43.249433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:31:43.310654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:47.200569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491195840450900:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:47.200632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:31:52.041910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491238790125628:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.041918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491238790125639:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.042035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.042322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491238790125643:2453], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.042395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:52.045766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:52.056403Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491238790125642:2452], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:31:52.122233Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491238790125695:3168] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:52.394996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:57.354815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:31:57.354859Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:00.801283Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491273893432671:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:00.803042Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:00.820871Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:00.911316Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:00.912904Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491273893432630:2081] 1771489920800011 != 1771489920800014 2026-02-19T08:32:00.956529Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:00.956627Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:00.961324Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:01.053515Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:01.053541Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:01.053548Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:01.053656Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:01.124186Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:01.483137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:01.503061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:01.829753Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:05.802028Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491273893432671:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:05.802094Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:10.298494Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491316843107390:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:10.298564Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491316843107398:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:10.298611Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:10.300841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491316843107405:2453], DatabaseId: /Root, PoolId: def ... 2026-02-19T08:32:26.044876Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:26.149094Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:26.150408Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491386062056183:2081] 1771489946032203 != 1771489946032206 2026-02-19T08:32:26.160926Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:26.161001Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:26.178816Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:26.217731Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:26.217759Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:26.217768Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:26.217867Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:26.240702Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:26.623702Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:26.632433Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:26.682123Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:26.843363Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:26.910801Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:27.044091Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:29.135009Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491398946959940:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.135102Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.135569Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491398946959950:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.135617Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.219466Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.262437Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.296884Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.339443Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.380251Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.417128Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.452690Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.529327Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:29.601639Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491398946960826:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.601729Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.601820Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491398946960831:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.602365Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491398946960833:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.602425Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:29.605785Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:29.615413Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491398946960834:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:29.678167Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491398946960886:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:31.033398Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491386062056220:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:31.033492Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:31.245883Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:34.205614Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=5&id=YmZiOGVlZmItOGQzNDQwODktOGZjY2Q3N2QtYTIyODAxOGY=, ActorId: [5:7608491407536895774:2536], ActorState: ExecuteState, LegacyTraceId: 01khtgf23p9xz457zfmaskw99y, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Query result size limit exceeded. (51202542 > 50331648)" issue_code: 2013 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase-EnableConditionalEraseResponseBatching-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:30.789436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:30.789672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.789780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:30.789838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:30.789876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:30.789928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-02-19T08:32:30.789966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:30.789996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 5, BatchMaxTime 0.100000s 2026-02-19T08:32:30.790080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.790157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:30.791046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:30.791360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:30.876472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:30.876534Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:30.889009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:30.889781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:30.889983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:30.915295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:30.916050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:30.916890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.917165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:30.920404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.920596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:30.921701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:30.921780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.921883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:30.921935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:30.921987Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:30.922168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:31.078840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.079827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.079979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080393Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.080871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409548, status: OK, batch size 2/5, batch age 0.000000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:32:35.252666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409550, status: OK, batch size 3/5, batch age 0.002000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:32:35.253010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409546, status: OK, batch size 4/5, batch age 0.002000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:32:35.253186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7171: Conditional erase flush pending response batch (by size), batch size 5/5, batch age 0.002000s/0.100000s, at schemeshard: 72057594046678944 2026-02-19T08:32:35.253233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 5, at schemeshard: 72057594046678944 2026-02-19T08:32:35.253266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:32:35.253309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-02-19T08:32:35.253329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-02-19T08:32:35.253346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:35.253367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:32:35.254717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 5, at schemeshard: 72057594046678944 2026-02-19T08:32:35.254875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:35.254947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable4, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], shardIdx: 72057594046678944:4, tabletId: 72075186233409549, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.622000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:35.255000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:35.255048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:35.255080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable1, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 
2020-09-18T22:04:00.622000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:35.255098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:35.255139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:35.255173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable3, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], shardIdx: 72057594046678944:3, tabletId: 72075186233409548, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.622000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:35.255194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:35.255217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:35.255254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable5, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], shardIdx: 72057594046678944:5, tabletId: 72075186233409550, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.622000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:35.255270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:35.255302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:35.255330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable2, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], shardIdx: 72057594046678944:2, tabletId: 72075186233409547, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.622000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:35.255358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:35.317381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2026-02-19T08:32:35.317524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-02-19T08:32:35.317576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable1, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:32:35.317666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-02-19T08:32:35.317728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 0 row count 0 2026-02-19T08:32:35.317761Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=TTLEnabledTable2, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:32:35.317844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-02-19T08:32:35.317883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 41 shard idx 72057594046678944:4 data size 43 row count 1 2026-02-19T08:32:35.317919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], pathId map=TTLEnabledTable4, is column=0, is olap=0, RowCount 1, DataSize 43 2026-02-19T08:32:35.317973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable 2026-02-19T08:32:35.318019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 603 row count 2 2026-02-19T08:32:35.318050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTable3, is column=0, is olap=0, RowCount 2, DataSize 603 2026-02-19T08:32:35.318134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-02-19T08:32:35.318172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 42 shard idx 72057594046678944:5 data size 627 row count 2 2026-02-19T08:32:35.318204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], pathId map=TTLEnabledTable5, is column=0, is olap=0, RowCount 2, DataSize 627, with borrowed parts 2026-02-19T08:32:35.318247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409550: SplitByLoadNotEnabledForTable 2026-02-19T08:32:35.330008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409551, status: OK, batch size 1/5, batch age 0.000000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:32:35.360997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:32:35.381481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7189: Handle: TEvFlushConditionalEraseBatch, at schemeshard: 72057594046678944 2026-02-19T08:32:35.381545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7171: Conditional erase flush pending response batch (by time), batch size 1/5, batch age 0.100000s/0.100000s, at schemeshard: 72057594046678944 
2026-02-19T08:32:35.381616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:35.381664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2026-02-19T08:32:35.383072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 ... waiting for single conditional erase batch completed (done) 2026-02-19T08:32:35.383641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:35.383730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable6, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], shardIdx: 72057594046678944:6, tabletId: 72075186233409551, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T22:04:00.724000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:35.383793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:34.372334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:34.372388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.372436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:34.372468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:34.372503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:34.372534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:34.372565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:34.372588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:34.372660Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.372713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:34.373302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:34.373479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:34.434650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:34.434699Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:34.444948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:34.445520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:34.445736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:34.455672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:34.456339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:34.456862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.457043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:34.459182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.459347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:34.460083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.460119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.460197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:34.460228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.460252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:34.460348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.556367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.557797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
SHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:35.514788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:35.514845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-02-19T08:32:35.514888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 281474976710760:0 Propose -> Done 2026-02-19T08:32:35.516382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.516425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-02-19T08:32:35.516487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-02-19T08:32:35.516563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:35.516595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-02-19T08:32:35.516617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:35.516646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-02-19T08:32:35.516687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:129:2153] message: TxId: 281474976710760 2026-02-19T08:32:35.516720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-02-19T08:32:35.516747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-02-19T08:32:35.516771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976710760:0 2026-02-19T08:32:35.516818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-02-19T08:32:35.518182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7355: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-02-19T08:32:35.518231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7357: Message: TxId: 281474976710760 2026-02-19T08:32:35.518286Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3021: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-02-19T08:32:35.518388Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3024: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:789:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-02-19T08:32:35.519661Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-02-19T08:32:35.519778Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:789:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:32:35.519833Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-02-19T08:32:35.521019Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1952: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-02-19T08:32:35.521126Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1953: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:789:2757], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-02-19T08:32:35.521200Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:342: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-02-19T08:32:35.521298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:35.521340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:878:2835] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:35.521817Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:35.522041Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 233us result status StatusSuccess 2026-02-19T08:32:35.522474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConfigurableBatchSize-BatchingDisabled-FlagOff-2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:34.089218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:34.089302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.089371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:34.089411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:34.089441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:34.089475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:34.089498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:34.089519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase 
configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:34.089583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.089655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:34.090423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:34.090685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:34.171165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:34.171237Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:34.181261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:34.181828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:34.182019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:34.192966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:34.193629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:34.194372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.194637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:34.197493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.197663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:34.198715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.198771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.198912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:34.198963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.199003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:34.199151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
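The startup dump above prints every interval as raw seconds, which makes the larger values hard to read at a glance. A small sketch converting the values copied from this dump (nothing here beyond unit conversion):

from datetime import timedelta

# Values taken verbatim from the configuration lines above, in seconds.
startup_intervals_s = {
    "BackgroundCompactionQueue.Timeout": 600.0,
    "BackgroundCompactionQueue.RoundInterval": 172800.0,   # 2 days
    "BorrowedCompactionQueue.Timeout": 15.0,
    "ConditionalErase.BatchMaxTime": 0.1,
    "RootShredManager.ShredInterval": 604800.0,            # 7 days
    "RootShredManager.ShredBSCInterval": 600.0,
}

for name, seconds in startup_intervals_s.items():
    print(f"{name:45s} {timedelta(seconds=seconds)}")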
2026-02-19T08:32:34.343403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.344313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.344456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.344575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.344639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.344723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.344808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.344885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.344973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.345058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.345132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.345186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.345267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.345355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
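Every one of these roster transactions carries UserToken: "\n\017metadata@system\022\000". The escapes are octal, and the byte sequence is consistent with protobuf wire format: a length-delimited field 1 holding "metadata@system" followed by an empty length-delimited field 2. The decoder below is a rough sketch under that assumption; the field numbers, and the guess that field 1 is the user SID, are inferred from the bytes rather than taken from the YDB proto definitions.

# "\n" = 0x0A, "\017" = 0x0F, "\022" = 0x12, "\000" = 0x00 (octal escapes as printed in the log).
raw = b"\n\x0fmetadata@system\x12\x00"

def decode_length_delimited(buf):
    # Minimal decoder: assumes every field is wire type 2 with a single-byte length,
    # which holds for this particular token.
    i, fields = 0, {}
    while i < len(buf):
        tag = buf[i]; i += 1
        field_no, wire_type = tag >> 3, tag & 0x07
        assert wire_type == 2, "only length-delimited fields expected here"
        length = buf[i]; i += 1
        fields[field_no] = buf[i:i + length]
        i += length
    return fields

print(decode_length_delimited(raw))   # {1: b'metadata@system', 2: b''}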
HARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:35.686546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:35.686639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 1600452060.043000s, next: now + 0.000000s, now: 2020-09-18T18:01:00.043000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:35.686717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:35.686767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:35.686890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600448460043000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:35.687410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:35.701333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:35.701384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:35.702940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:35.703091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:35.703157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLTableBatchSize, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 3599.999000s, now: 2020-09-18T18:01:00.046000Z, at schemeshard: 72057594046678944 2026-02-19T08:32:35.703196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:35.703575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:35.703794Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 206us 
result status StatusSuccess 2026-02-19T08:32:35.704431Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:35.708725Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableBatchSize" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:35.708920Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableBatchSize" took 205us result status StatusSuccess 2026-02-19T08:32:35.709271Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableBatchSize" PathDescription { Self { Name: "TTLTableBatchSize" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableBatchSize" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:35.628988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:35.629053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:35.629096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:35.629125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:35.629148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 
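In the ConfigurableBatchSize output above, the conditional-erase request for /MyRoot/TTLTableBatchSize carried WallClockTimestamp: 1600448460043000 with ColumnUnit: UNIT_AUTO, while the reported "now" was 2020-09-18T18:01:00.043000Z and the table's TTL setting was ExpireAfterSeconds: 3600. Those numbers line up as threshold = now - ExpireAfterSeconds, expressed in microseconds since the epoch, with rows whose "ts" value is older than the threshold eligible for erasure. The check below only verifies that arithmetic against the logged values; the interpretation is inferred from the numbers, not from the schemeshard source.

from datetime import datetime, timedelta, timezone

expire_after = timedelta(seconds=3600)  # ExpireAfterSeconds from the table's TTLSettings
now = datetime(2020, 9, 18, 18, 1, 0, 43000, tzinfo=timezone.utc)  # "now" reported by TTxRunConditionalErase

threshold = now - expire_after
epoch = datetime(1970, 1, 1, tzinfo=timezone.utc)
threshold_us = (threshold - epoch) // timedelta(microseconds=1)  # exact integer microseconds

print(threshold_us)  # 1600448460043000, matching WallClockTimestamp in the erase request
assert threshold_us == 1600448460043000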
2026-02-19T08:32:35.629172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:35.629196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:35.629238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:35.629286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:35.629341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:35.629932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:35.630171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:35.697766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:35.697835Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:35.706588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:35.707062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:35.707207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:35.716131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:35.716773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:35.717349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:35.717602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:35.720040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:35.720206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:35.721054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:35.721093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:35.721162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:35.721197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:35.721223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:35.721357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.825922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.826987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.827022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.827052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.827107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.827159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
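One more detail about these internal transactions: the TxIds allocated by the roster update (281474976710657 and up) and by other internal operations in these tests (for example the index-build unlock transaction 281474976710760) all sit just above 2^48, whereas user-initiated operations in the same logs use small ids such as 102-104. Whether that split is a deliberate convention is not stated anywhere in this log; the arithmetic itself is easy to confirm:

FIRST_INTERNAL_TX_ID = 281474976710657   # first TxId issued by SysViewsRosterUpdate above
UNLOCK_TX_ID = 281474976710760           # unlock transaction from the index-build test earlier in the log

assert FIRST_INTERNAL_TX_ID == 2**48 + 1
assert UNLOCK_TX_ID == 2**48 + 104
print("internal TxIds start at 2^48 + 1 =", 2**48 + 1)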
_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:36.767202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:36.767246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:36.767276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-02-19T08:32:36.767306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-02-19T08:32:36.767844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:36.767924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-02-19T08:32:36.767946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-02-19T08:32:36.767978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-02-19T08:32:36.768004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-02-19T08:32:36.768062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-02-19T08:32:36.769268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1087 } } CommitVersion { Step: 250 TxId: 103 } 2026-02-19T08:32:36.769300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2026-02-19T08:32:36.769396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1087 } } CommitVersion { Step: 250 TxId: 103 } 2026-02-19T08:32:36.769490Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1087 } } CommitVersion { Step: 250 TxId: 103 } debug: NTableState::TProposedWaitParts operationId# 103:0 2026-02-19T08:32:36.769920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 944 RawX2: 4294970183 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-02-19T08:32:36.769957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2026-02-19T08:32:36.770101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 944 RawX2: 4294970183 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-02-19T08:32:36.770145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:36.770244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 944 RawX2: 4294970183 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-02-19T08:32:36.770320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:36.770348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.770374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:32:36.770407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 103:0 ProposedWaitParts -> Done 2026-02-19T08:32:36.772989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:36.773206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-02-19T08:32:36.774141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.774227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.774507Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.774557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:32:36.774663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:36.774706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:36.774736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:36.774760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:36.774795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:32:36.774849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:807:2772] message: TxId: 103 2026-02-19T08:32:36.774892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:36.774927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:32:36.774960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:32:36.775064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:32:36.776442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:32:36.776497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:975:2910] TestWaitNotification: OK eventTxId 103 W0000 00:00:1771489956.776958 596083 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2026-02-19T08:32:36.778913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:36.779281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.779410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: 
"modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2026-02-19T08:32:36.779744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2026-02-19T08:32:36.781484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:36.781678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:35.981506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:35.981610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:35.981680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:35.981738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:35.981892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:35.981934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:35.981969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:35.981997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:35.982074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:35.982153Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:35.983016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:35.983315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:36.071213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:36.071300Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:36.088611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:36.089223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:36.089415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:36.101637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:36.102291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:36.102959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:36.103179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:36.105819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:36.105964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:36.106864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:36.106911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:36.106983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:36.107021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:36.107058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:36.107168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.231957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.232873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.233889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
hemeshard_impl.cpp:2707: Change state for txid 101:2 ProposedWaitParts -> Done 2026-02-19T08:32:36.985398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:36.985434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2026-02-19T08:32:36.985508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:36.985542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:36.985633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 729 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:32:36.985680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:36.985746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.985769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-02-19T08:32:36.985793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:32:36.989615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:36.989810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:36.994307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:36.994639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:32:36.994779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-02-19T08:32:36.994977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.995103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-02-19T08:32:36.995468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.995662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2026-02-19T08:32:36.995694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:2 ProgressState 2026-02-19T08:32:36.995772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-02-19T08:32:36.995799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-02-19T08:32:36.995830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-02-19T08:32:36.995864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-02-19T08:32:36.995895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2026-02-19T08:32:36.996187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.996213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:36.996257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-02-19T08:32:36.996281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:36.996313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-02-19T08:32:36.996332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:36.996349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2026-02-19T08:32:36.996416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:781:2746] message: TxId: 101 2026-02-19T08:32:36.996461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-02-19T08:32:36.996502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:36.996529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:36.996632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:36.996664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-02-19T08:32:36.996678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5632: RemoveTx for txid 101:1 2026-02-19T08:32:36.996708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-02-19T08:32:36.996727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-02-19T08:32:36.996740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:2 2026-02-19T08:32:36.996768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-02-19T08:32:36.999123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:36.999161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:782:2747] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:36.999551Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:36.999736Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 199us result status StatusSuccess 2026-02-19T08:32:37.000113Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 
WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes [GOOD] >> TBackupCollectionTests::TableWithMixedIndexTypes >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateAlterKesus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:37.416673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:37.416735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:37.416782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:37.416833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:37.416857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:37.416884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-02-19T08:32:37.416904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:37.416921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:37.416978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:37.417025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:37.417607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:37.417952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:37.500895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:37.500965Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:37.512410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:37.512867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:37.513046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:37.523498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:37.524129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:37.524832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:37.525085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:37.527968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:37.528132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:37.529125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:37.529264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:37.529362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:37.529422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-02-19T08:32:37.529459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:37.529589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:37.659072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:32:37.660521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.660988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
68Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:38.290418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:32:38.290943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:38.291004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:38.291042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:38.291069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:32:38.291103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:38.291195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:32:38.291648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 906 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:38.291674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:38.291771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 906 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:38.291894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 906 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-02-19T08:32:38.292362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 
72057594046678944, message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:38.292402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:38.292519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:38.292572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:38.292640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:38.292694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:38.292724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:38.292745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:38.292772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:32:38.295206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:38.295335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:38.295409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:38.295568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:38.295608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:32:38.295681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:38.295706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:38.295729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:38.295748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone 
TxId: 102 ready parts: 1/1 2026-02-19T08:32:38.295769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:32:38.295833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2717] message: TxId: 102 2026-02-19T08:32:38.295875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:38.295907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:32:38.295933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:32:38.296018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:38.297096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:38.297133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:802:2770] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:38.297454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:38.297643Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 184us result status StatusSuccess 2026-02-19T08:32:38.298110Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoDlq3Consumers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:38.757454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:38.757518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:38.757574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:38.757622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:38.757666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:38.757730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:38.757761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:38.757783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, 
BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:38.757840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:38.757903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:38.758591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:38.758815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:38.838434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:38.838549Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:38.848778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:38.849396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:38.849549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:38.858639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:38.859316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:38.859915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:38.860203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:38.863511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:38.863724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:38.864846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:38.864906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:38.865040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:38.865100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:38.865163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:38.865316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:38.968558Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:38.969945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
26-02-19T08:32:39.419065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-02-19T08:32:39.419268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:39.419348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:32:39.419382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:32:39.419419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:32:39.420805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:39.420847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:32:39.420909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:32:39.422282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:39.422330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:39.422377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:39.422429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:32:39.422563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:32:39.423757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:32:39.423877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:32:39.424123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:39.424259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 
AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:39.424294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:39.424554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:32:39.424603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:32:39.424716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:39.424772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:32:39.426152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:39.426193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:39.426343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:39.426371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:32:39.426704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:32:39.426749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:32:39.426831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:39.426861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:39.426889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:32:39.426912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:39.426950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:32:39.426978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:32:39.427005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:32:39.427027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:32:39.427071Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:32:39.427100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:32:39.427138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:32:39.427533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:39.427634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:32:39.427674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:32:39.427712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:32:39.427745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:32:39.427845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:32:39.430366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:32:39.430792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:32:39.431784Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:673:2661] Bootstrap 2026-02-19T08:32:39.432685Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:673:2661] Become StateWork (SchemeCache [1:678:2666]) 2026-02-19T08:32:39.434403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:39.434658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:39.434743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { 
Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2026-02-19T08:32:39.435099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2026-02-19T08:32:39.435984Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:673:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:32:39.437975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:39.438156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-02-19T08:32:39.438578Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> Yq_1::ModifyConnections >> Yq_1::CreateQuery_With_Idempotency >> Yq_1::CreateConnection_With_Existing_Name >> Yq_1::Basic_Null >> Yq_1::DescribeJob >> Yq_1::DeleteConnections >> Yq_1::DescribeConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:32:22.999419Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:23.113494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:32:23.132740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:32:23.132838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:32:23.133337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:32:23.423402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:23.423567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:23.498860Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489940285102 != 1771489940285106 2026-02-19T08:32:23.511645Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:23.557898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:23.643147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:32:23.944477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:23.958326Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:24.063479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:24.102315Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:32:24.103521Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:32:24.103831Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:897:2773] 2026-02-19T08:32:24.104066Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:24.113981Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828672, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvBoot 2026-02-19T08:32:24.114997Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:897:2773]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:32:24.155051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateInit, received event# 268828673, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvRestored 2026-02-19T08:32:24.155339Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2026-02-19T08:32:24.155579Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:678: TxInitSchema.Execute 2026-02-19T08:32:24.163384Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3195: StateInactive, received event# 268828684, Sender [1:886:2766], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-02-19T08:32:24.163932Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:24.164058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:24.165210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-02-19T08:32:24.165276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037888 2026-02-19T08:32:24.165341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037888 2026-02-19T08:32:24.165618Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:24.165772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:24.165835Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037888 persisting started state actor id [1:929:2773] in generation 1 2026-02-19T08:32:24.166074Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:744: TxInitSchema.Complete 2026-02-19T08:32:24.166133Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:51: TDataShard::TTxInit::Execute 2026-02-19T08:32:24.166986Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1329: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-02-19T08:32:24.167026Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1385: LoadLockChangeRecords at tablet: 72075186224037889 2026-02-19T08:32:24.167052Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1434: LoadChangeRecordCommits at tablet: 72075186224037889 2026-02-19T08:32:24.167230Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:95: TDataShard::TTxInit::Complete 2026-02-19T08:32:24.167322Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:103: TDataShard::TTxInitRestored::Execute 2026-02-19T08:32:24.167373Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:155: DataShard 72075186224037889 persisting started state actor id [1:930:2775] in generation 1 2026-02-19T08:32:24.178048Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.200929Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-02-19T08:32:24.201123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.201205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037888, actorId: [1:933:2794] 2026-02-19T08:32:24.201243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037888 2026-02-19T08:32:24.201266Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-02-19T08:32:24.201291Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-02-19T08:32:24.201517Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:897:2773], Recipient [1:897:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:32:24.201558Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing 
event TEvPrivate::TEvProgressTransaction 2026-02-19T08:32:24.201684Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:168: TDataShard::TTxInitRestored::Complete 2026-02-19T08:32:24.201731Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-02-19T08:32:24.201778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-02-19T08:32:24.201822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Change sender created: at tablet: 72075186224037889, actorId: [1:934:2795] 2026-02-19T08:32:24.201844Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1269: Trying to activate change sender: at tablet: 72075186224037889 2026-02-19T08:32:24.201859Z node 1 :TX_DATASHARD INFO: datashard.cpp:1286: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-02-19T08:32:24.201874Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-02-19T08:32:24.202108Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 2146435072, Sender [1:900:2775], Recipient [1:900:2775]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-02-19T08:32:24.202130Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3241: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-02-19T08:32:24.202236Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-02-19T08:32:24.202325Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-02-19T08:32:24.202475Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-02-19T08:32:24.202532Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-02-19T08:32:24.202590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-02-19T08:32:24.202628Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-02-19T08:32:24.202659Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-02-19T08:32:24.202690Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-02-19T08:32:24.202739Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-02-19T08:32:24.202784Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-02-19T08:32:24.202826Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-02-19T08:32:24.202908Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269877761, Sender [1:903:2776], Recipient [1:897:2773]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-02-19T08:32:24.202931Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3252: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-02-19T08:32:24.202988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, clientId# 
[1:891:2769], serverId# [1:903:2776], sessionId# [0:0:0] 2026-02-19T08:32:24.203044Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-02-19T08:32:24.203072Z node 1 :TX_DAT ... on plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:32:38.555656Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 72075186224037888 has finished 2026-02-19T08:32:38.566476Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2026-02-19T08:32:38.566540Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 72075186224037888 on unit DirectOp 2026-02-19T08:32:38.566585Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status GENERIC_ERROR 2026-02-19T08:32:38.723799Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:69:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:32:38.723875Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:69:2116] TxId# 281474976715662 ProcessProposeKqpTransaction 2026-02-19T08:32:38.727059Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [3:1407:3190], Recipient [3:895:2774]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-02-19T08:32:38.727303Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-02-19T08:32:38.727363Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037888 CompleteEdge# v23000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v23000/18446744073709551615 ImmediateWriteEdgeReplied# v23000/18446744073709551615 2026-02-19T08:32:38.727409Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037888 changed HEAD read to non-repeatable v23000/18446744073709551615 2026-02-19T08:32:38.727480Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2026-02-19T08:32:38.727567Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-02-19T08:32:38.727605Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2026-02-19T08:32:38.727640Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-02-19T08:32:38.727675Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 2026-02-19T08:32:38.727721Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 72075186224037888 2026-02-19T08:32:38.727757Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-02-19T08:32:38.727783Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-02-19T08:32:38.727803Z node 3 :TX_DATASHARD 
TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2026-02-19T08:32:38.727824Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2026-02-19T08:32:38.727931Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-02-19T08:32:38.728171Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037888 Complete read# {[3:1407:3190], 0} after executionsCount# 1 2026-02-19T08:32:38.728231Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037888 read iterator# {[3:1407:3190], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:32:38.728322Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037888 read iterator# {[3:1407:3190], 0} finished in read 2026-02-19T08:32:38.728378Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-02-19T08:32:38.728403Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2026-02-19T08:32:38.728426Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2026-02-19T08:32:38.728450Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2026-02-19T08:32:38.728493Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-02-19T08:32:38.728515Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2026-02-19T08:32:38.728539Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 72075186224037888 has finished 2026-02-19T08:32:38.728575Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-02-19T08:32:38.728669Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-02-19T08:32:38.730286Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [3:1407:3190], Recipient [3:895:2774]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:32:38.730350Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2026-02-19T08:32:38.860006Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:69:2116] Handle TEvExecuteKqpTransaction 2026-02-19T08:32:38.860065Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:69:2116] TxId# 281474976715663 ProcessProposeKqpTransaction 2026-02-19T08:32:38.866689Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553215, Sender [3:1435:3214], Recipient [3:1119:2935]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { 
OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2026-02-19T08:32:38.866912Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2754: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-02-19T08:32:38.866973Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2383: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v7000/18446744073709551615 ImmediateWriteEdgeReplied# v7000/18446744073709551615 2026-02-19T08:32:38.867022Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2857: 72075186224037889 changed HEAD read to non-repeatable v23000/18446744073709551615 2026-02-19T08:32:38.867082Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-02-19T08:32:38.867197Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-02-19T08:32:38.867259Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-02-19T08:32:38.867304Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-02-19T08:32:38.867356Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-02-19T08:32:38.867405Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-02-19T08:32:38.867444Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-02-19T08:32:38.867474Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-02-19T08:32:38.867496Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-02-19T08:32:38.867516Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-02-19T08:32:38.867627Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1749: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-02-19T08:32:38.867963Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2398: 72075186224037889 Complete read# {[3:1435:3214], 0} after executionsCount# 1 2026-02-19T08:32:38.868020Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2372: 72075186224037889 read iterator# {[3:1435:3214], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-02-19T08:32:38.868112Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2423: 72075186224037889 read iterator# {[3:1435:3214], 0} finished in read 2026-02-19T08:32:38.868181Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-02-19T08:32:38.868208Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 
executing on unit ExecuteRead 2026-02-19T08:32:38.868232Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-02-19T08:32:38.868258Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-02-19T08:32:38.868300Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-02-19T08:32:38.868318Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-02-19T08:32:38.868343Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-02-19T08:32:38.868399Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2989: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-02-19T08:32:38.868517Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3038: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-02-19T08:32:38.874285Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3216: StateWork, received event# 269553219, Sender [3:1435:3214], Recipient [3:1119:2935]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-02-19T08:32:38.874376Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3725: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } >> Yq_1::ListConnections >> Yq_1::Basic >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> TestSqsTopicHttpProxy::TestGetQueueAttributesStd0Consumers |95.4%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBackupCollectionTests::TableWithMixedIndexTypes [GOOD] >> TBackupCollectionTests::MultipleTablesWithIndexes >> TestSqsTopicHttpProxy::TestSendMessageBatch [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:57.739297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:57.739380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.739486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:57.739541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:57.739579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:57.739619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:57.739648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:57.739674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:57.739736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.739809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:57.740705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2026-02-19T08:31:57.741017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:57.838732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:57.838789Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:57.852538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:57.853173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:57.853353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:57.866368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:57.867066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:57.867823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:57.868106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:57.871067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:57.871255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:57.872405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:57.872472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:57.872588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:57.872634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:57.872688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:57.872856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:58.028909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.029897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.030906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
12:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:32:39.056730Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-02-19T08:32:39.057222Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:39.057281Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:32:39.057493Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:39.057556Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:39.057624Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:39.057687Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:39.057773Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-02-19T08:32:39.057844Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:39.057913Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:32:39.057967Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:32:39.058189Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-02-19T08:32:39.058266Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-02-19T08:32:39.058327Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-02-19T08:32:39.058398Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-02-19T08:32:39.059446Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:39.059543Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:39.059593Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:39.059665Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 
72057594046678944, LocalPathId: 38], version: 5 2026-02-19T08:32:39.059741Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:39.061050Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:39.061138Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:39.061173Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:39.061222Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-02-19T08:32:39.061257Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:32:39.061348Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-02-19T08:32:39.065422Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:39.065588Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-02-19T08:32:39.065871Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-02-19T08:32:39.065946Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-02-19T08:32:39.066526Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-02-19T08:32:39.066661Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:39.066729Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:867:2818] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:39.067433Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:39.067771Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 388us result status StatusSuccess 2026-02-19T08:32:39.068410Z node 17 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:39.069190Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:39.069468Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 314us result status StatusSuccess 2026-02-19T08:32:39.070093Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 
TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:39.754377Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__find_subdomain_path_id.cpp:20: FindTabletSubDomainPathId for tablet 72075186233409546 >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:34.967366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:34.967454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.967517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:34.967551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:34.967578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:34.967647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:34.967669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:34.967685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:34.967740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.967797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:34.968440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:34.968655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:35.038293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:35.038344Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:35.047663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:35.048166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:35.048320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:35.057464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:35.058274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:35.058996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:35.059269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:35.062284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:35.062442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:35.063411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:35.063485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:35.063614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:35.063659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:35.063694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-02-19T08:32:35.063817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:35.178686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.179989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.180022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.180071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:35.180158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.089026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.089085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.095019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.095194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.095321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.095440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.095548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.095607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 103:0 ProgressState 2026-02-19T08:32:41.095728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:41.095772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:41.095818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-02-19T08:32:41.095856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:41.095896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-02-19T08:32:41.095961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3117:4334] message: TxId: 103 2026-02-19T08:32:41.096009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-02-19T08:32:41.096080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-02-19T08:32:41.096119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 103:0 2026-02-19T08:32:41.097650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-02-19T08:32:41.100752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-02-19T08:32:41.100818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4370:5523] TestWaitNotification: OK eventTxId 103 2026-02-19T08:32:41.101464Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:41.101765Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 319us result status StatusSuccess 2026-02-19T08:32:41.102438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 
72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } IsRestore: false } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2026-02-19T08:32:41.105338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:41.105653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2026-02-19T08:32:41.109203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2026-02-19T08:32:41.112534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:41.112762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: 
ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-02-19T08:32:41.113151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-02-19T08:32:41.113201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-02-19T08:32:41.113673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-02-19T08:32:41.113796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-02-19T08:32:41.113838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4726:5878] TestWaitNotification: OK eventTxId 104 >> TableCreation::MultipleTablesCreation >> KqpProxy::PassErrroViaSessionActor >> ScriptExecutionsTest::RestartQueryWithGetOperation >> TableCreation::ConcurrentTableCreation >> TableCreation::ConcurrentMultipleTablesCreation >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect >> TBackupCollectionTests::MultipleTablesWithIndexes [GOOD] >> TBackupCollectionTests::IncrementalBackupWithIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:31:47.793946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:31:47.901898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:31:47.921232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:31:47.921317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:31:47.922091Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:31:48.215309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:48.215460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:48.296287Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771489904888572 != 1771489904888576 2026-02-19T08:31:48.305807Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:48.354886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:48.480075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:31:48.911424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:48.911519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:48.911561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:48.912067Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:48.926641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:49.281429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:31:49.380259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:49.636892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:50.014003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:50.289978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.096726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1914:3518], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.097134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.097950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1987:3537], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.098130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:51.124945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.299157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.554559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:51.833752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:52.082485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:52.340616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:52.608605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:52.916032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:53.269822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:53.269961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:53.270431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:53.270551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:53.270620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2802:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:53.276368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:53.443801Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2804:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:31:53.503175Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:2864:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:55.693233Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [1:3165:4457] TxId: 281474976715673. Ctx: { TraceId: 01khtgdwrsa81d5pxffp11nx26, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk5NWZlNmYtMTEzMzcwYy02ZWJmNjcxOS0zMDc5ZTU1OQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# 2026-02-19T08:31:55.69 ... lterResource ok# false data# peer# 2026-02-19T08:32:42.123063Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614435280] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2026-02-19T08:32:42.123297Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261443f380] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2026-02-19T08:32:42.123303Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614548c80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2026-02-19T08:32:42.123512Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614481480] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2026-02-19T08:32:42.123517Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614383180] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2026-02-19T08:32:42.123730Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261443b480] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2026-02-19T08:32:42.123747Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614439f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2026-02-19T08:32:42.123954Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261447ff80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2026-02-19T08:32:42.123967Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261448b580] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2026-02-19T08:32:42.124176Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467be80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2026-02-19T08:32:42.124190Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467da80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2026-02-19T08:32:42.124385Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614480680] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2026-02-19T08:32:42.124416Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261448a780] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2026-02-19T08:32:42.124597Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614489280] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2026-02-19T08:32:42.124827Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261443c980] received request 
Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2026-02-19T08:32:42.125059Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614451980] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2026-02-19T08:32:42.125267Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144a9180] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2026-02-19T08:32:42.125304Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144aad80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2026-02-19T08:32:42.125484Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144a8a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2026-02-19T08:32:42.125548Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26140e2380] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2026-02-19T08:32:42.125782Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144afa80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2026-02-19T08:32:42.125999Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26143d8680] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2026-02-19T08:32:42.126242Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144b3280] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2026-02-19T08:32:42.126487Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144b2b80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2026-02-19T08:32:42.126718Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144b2480] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2026-02-19T08:32:42.127618Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144b1680] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2026-02-19T08:32:42.127869Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467e880] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2026-02-19T08:32:42.127926Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614172280] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2026-02-19T08:32:42.128118Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614170d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2026-02-19T08:32:42.128180Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467ef80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2026-02-19T08:32:42.128348Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614171480] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2026-02-19T08:32:42.128398Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261416ff80] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2026-02-19T08:32:42.128577Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261416f180] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2026-02-19T08:32:42.128616Z node 7 
:GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26141afd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2026-02-19T08:32:42.128816Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467b780] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2026-02-19T08:32:42.128844Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467e180] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2026-02-19T08:32:42.129031Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467cc80] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2026-02-19T08:32:42.129061Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467d380] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2026-02-19T08:32:42.129265Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614532080] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2026-02-19T08:32:42.129275Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261452a980] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2026-02-19T08:32:42.129484Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26145db580] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2026-02-19T08:32:42.129504Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26145e4180] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2026-02-19T08:32:42.129728Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26145e2c80] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2026-02-19T08:32:42.129739Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26145e3380] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2026-02-19T08:32:42.129936Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261467c580] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2026-02-19T08:32:42.129990Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614525580] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2026-02-19T08:32:42.130198Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261452b080] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2026-02-19T08:32:42.130224Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26145e3a80] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2026-02-19T08:32:42.130408Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26145e2580] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2026-02-19T08:32:42.130429Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26145e1e80] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2026-02-19T08:32:42.130638Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26142a3880] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2026-02-19T08:32:42.130654Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614457480] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2026-02-19T08:32:42.130848Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614456d80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2026-02-19T08:32:42.130853Z node 
7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614456680] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2026-02-19T08:32:42.131055Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614455f80] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2026-02-19T08:32:42.131081Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144f5a80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2026-02-19T08:32:42.131285Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26141ad380] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2026-02-19T08:32:42.131298Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26141af680] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2026-02-19T08:32:42.131495Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26142fef80] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2026-02-19T08:32:42.131511Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d26144cd680] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2026-02-19T08:32:42.131706Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2614465480] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2026-02-19T08:32:42.131727Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d261454bd80] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true [GOOD] >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded >> TableCreation::SimpleTableCreation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:37.413087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:37.413165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:37.413255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:37.413294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:37.413320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:37.413352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2026-02-19T08:32:37.413376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:37.413397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:37.413452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:37.413503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:37.414221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:37.414473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:37.480617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:37.480672Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:37.489702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:37.490199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:37.490347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:37.498883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:37.499474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:37.500088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:37.500348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:37.502860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:37.503005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:37.503837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:37.503895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:37.503973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:37.504010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:37.504040Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:37.504147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:37.610516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.612748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.612858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.612951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.612989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613122Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.613436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
ifyTxCompletionResult> complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.250879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:43.250982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:43.251014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:43.251052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:43.251082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:43.251115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:32:43.251188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3115:4332] message: TxId: 101 2026-02-19T08:32:43.251246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:43.251309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:43.251342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:43.252272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-02-19T08:32:43.255489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:43.255543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:3116:4333] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:43.256085Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:43.256303Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 255us result status StatusSuccess 2026-02-19T08:32:43.256970Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } } Columns { Id: 3 Name: "str" Type: "String" 
TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } IsRestore: false } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1771489963.257596 596844 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2026-02-19T08:32:43.259930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:43.260122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:43.260507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose 
status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2026-02-19T08:32:43.262486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:43.262719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:59.099656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:59.099761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:59.099829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:59.099867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:59.099906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:59.099942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:59.099969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:59.099996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:59.100050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:59.100122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 
600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:59.100984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:59.101305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:59.187554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:59.187609Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:59.202618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:59.207455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:59.207614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:59.216178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:59.216962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:59.217626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:59.217898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:59.220647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:59.220852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:59.222000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:59.222060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:59.222166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:59.222207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:59.222265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:59.222426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:59.381916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:31:59.382916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:59.383899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
bleState::TProposedWaitParts operationId# 106:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1345 RawX2: 73014447252 } Origin: 72075186233409552 State: 2 TxId: 106 Step: 0 Generation: 2 2026-02-19T08:32:44.599076Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:44.599183Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1094: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-02-19T08:32:44.602284Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:32:44.602766Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:32:44.615554Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 751 RawX2: 73014446764 } Origin: 72075186233409546 State: 2 TxId: 106 Step: 0 Generation: 2 2026-02-19T08:32:44.615638Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:44.615773Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Source { RawX1: 751 RawX2: 73014446764 } Origin: 72075186233409546 State: 2 TxId: 106 Step: 0 Generation: 2 2026-02-19T08:32:44.615839Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:44.615937Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 751 RawX2: 73014446764 } Origin: 72075186233409546 State: 2 TxId: 106 Step: 0 Generation: 2 2026-02-19T08:32:44.615997Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:44.616062Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:32:44.616143Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2026-02-19T08:32:44.616204Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 106:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:44.616267Z node 17 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 106:0 ProposedWaitParts -> Done 2026-02-19T08:32:44.618396Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:32:44.618952Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:32:44.619034Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2026-02-19T08:32:44.619121Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-02-19T08:32:44.619212Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2026-02-19T08:32:44.619373Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-02-19T08:32:44.619447Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 106:0 Done -> Done 2026-02-19T08:32:44.621553Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-02-19T08:32:44.621618Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 106:0 ProgressState 2026-02-19T08:32:44.621861Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:32:44.621930Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:32:44.622031Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-02-19T08:32:44.622093Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:32:44.622157Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-02-19T08:32:44.622274Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:807:2772] message: TxId: 106 2026-02-19T08:32:44.622367Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-02-19T08:32:44.622453Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-02-19T08:32:44.622518Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 106:0 2026-02-19T08:32:44.622725Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 47] was 3 2026-02-19T08:32:44.622793Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount 
reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-02-19T08:32:44.624910Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-02-19T08:32:44.625002Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [17:1375:3242] TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2026-02-19T08:32:44.628195Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateTable CreateTable { Name: "copyChangefeed" CopyFromTable: "/MyRoot/DirA/Table1/Stream" } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:44.628591Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 107:0, explain: Check failed: path: '/MyRoot/DirA/Table1/Stream', error: path is not a table (id: [OwnerId: 72057594046678944, LocalPathId: 42], type: EPathTypeCdcStream, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:32:44.628679Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/DirA/Table1/Stream', error: path is not a table (id: [OwnerId: 72057594046678944, LocalPathId: 42], type: EPathTypeCdcStream, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:32:44.631608Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/DirA/Table1/Stream\', error: path is not a table (id: [OwnerId: 72057594046678944, LocalPathId: 42], type: EPathTypeCdcStream, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:44.632098Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/DirA/Table1/Stream', error: path is not a table (id: [OwnerId: 72057594046678944, LocalPathId: 42], type: EPathTypeCdcStream, state: EPathStateNoChanges), operation: CREATE TABLE, path: /MyRoot/DirA/copyChangefeed TestModificationResult got TxId: 107, wait until txId: 107 TestModificationResults wait txId: 108 2026-02-19T08:32:44.635235Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateTable CreateTable { Name: "copyStreamImpl" CopyFromTable: "/MyRoot/DirA/Table1/Stream/streamImpl" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:32:44.635635Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 108:0, explain: Check failed: path: '/MyRoot/DirA/Table1/Stream/streamImpl', error: path is not a table (id: [OwnerId: 72057594046678944, LocalPathId: 43], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:32:44.635719Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusNameConflict, reason: Check failed: path: 
'/MyRoot/DirA/Table1/Stream/streamImpl', error: path is not a table (id: [OwnerId: 72057594046678944, LocalPathId: 43], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-02-19T08:32:44.638681Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/DirA/Table1/Stream/streamImpl\', error: path is not a table (id: [OwnerId: 72057594046678944, LocalPathId: 43], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:44.639145Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/DirA/Table1/Stream/streamImpl', error: path is not a table (id: [OwnerId: 72057594046678944, LocalPathId: 43], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), operation: CREATE TABLE, path: /MyRoot/DirA/copyStreamImpl TestModificationResult got TxId: 108, wait until txId: 108 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TSchemeShardTTLTests::CondEraseOverReboot [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> AssignTxId::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 
31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest |95.4%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestSqsTopicHttpProxy::TestGetQueueAttributesStd0Consumers [GOOD] >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CondEraseOverReboot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:36.171658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:36.171744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:36.171826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:36.171867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:36.171898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:36.171951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:36.171985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:36.172012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 2, BatchMaxTime 0.100000s 2026-02-19T08:32:36.172075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:36.172144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:36.172907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:36.173195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:36.261308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:36.261358Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:36.272811Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:36.273294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:36.273470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:36.283704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:36.284366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:36.285055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:36.285298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:36.288288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:36.288477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:36.289611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:36.289674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:36.289798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:36.289847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:36.289892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:36.290028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:36.438730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:32:36.439884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.439949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.440013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:36.440067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 2026-02-19T08:32:43.038113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:43.040042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TEST 7 ... 
waiting for single conditional erase batch completed Leader for TabletID 72057594046678944 is [1:986:2925] sender: [1:1106:2058] recipient: [1:15:2062] 2026-02-19T08:32:43.386697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 675 rowCount 3 cpuUsage 0.0003 2026-02-19T08:32:43.440212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7906: Cannot get console configs 2026-02-19T08:32:43.440283Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:43.559527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:32:43.559663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:43.559755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLRebootTable, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 910.012500s, next: now + 0.000000s, now: 2020-09-18T18:31:20.205500Z, at schemeshard: 72057594046678944 2026-02-19T08:32:43.559861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLRebootTable, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-02-19T08:32:43.559931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:43.560062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409546, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 1600450280205500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:32:43.561154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:43.597987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7244: Conditional erase finished, tabletId: 72075186233409546, status: OK, batch size 1/5, batch age 0.000000s/0.100000s, enqueued, at schemeshard: 72057594046678944 2026-02-19T08:32:43.628930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-02-19T08:32:43.629103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 675 row count 3 2026-02-19T08:32:43.629168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLRebootTable, is column=0, is olap=0, RowCount 3, DataSize 675 2026-02-19T08:32:43.629322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409546: 
SplitByLoadNotEnabledForTable 2026-02-19T08:32:43.639687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:32:43.652044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7189: Handle: TEvFlushConditionalEraseBatch, at schemeshard: 72057594046678944 2026-02-19T08:32:43.652133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7171: Conditional erase flush pending response batch (by time), batch size 1/5, batch age 0.100000s/0.100000s, at schemeshard: 72057594046678944 2026-02-19T08:32:43.652204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:43.652253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:43.654030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 ... waiting for single conditional erase batch completed (done) 2026-02-19T08:32:43.654323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:43.654404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLRebootTable, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 899.999000s, now: 2020-09-18T18:31:20.308500Z, at schemeshard: 72057594046678944 2026-02-19T08:32:43.654462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:32:46.130254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 675 rowCount 3 cpuUsage 0.0003 TEST 8 2026-02-19T08:32:46.132270Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLRebootTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-02-19T08:32:46.132611Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLRebootTable" took 450us result status StatusSuccess 2026-02-19T08:32:46.133568Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLRebootTable" PathDescription { Self { Name: "TTLRebootTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLRebootTable" Columns { Name: "key" Type: "Uint64" 
TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "ts" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 900000000 RetryInterval: 900000000 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 675 RowCount: 3 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 43 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3 Memory: 4318672 Network: 0 Storage: 131 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 675 DataSize: 675 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 43 DataSize: 43 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2026-02-19T08:32:41.905235Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491448171238840:2073];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:41.905382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:42.278019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:42.336560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:42.336667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:42.409877Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491448171238803:2081] 1771489961900677 != 1771489961900680 2026-02-19T08:32:42.419994Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:42.426822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:42.542518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:42.902188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:42.902208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:42.902294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:42.902405Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:42.911602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:43.511740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:43.522103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:45.287620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491465351108887:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:45.287745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:45.288154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491465351108896:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:45.288224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:45.887671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-02-19T08:32:45.904486Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:41: [controller 72075186224037888] OnActivateExecutor 2026-02-19T08:32:45.904570Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:17: [controller 72075186224037888][TxInitSchema] Execute 2026-02-19T08:32:45.907795Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:26: [controller 72075186224037888][TxInitSchema] Complete 2026-02-19T08:32:45.907874Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:245: [controller 72075186224037888][TxInit] Execute 2026-02-19T08:32:45.908131Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:250: [controller 72075186224037888][TxInit] Complete 2026-02-19T08:32:45.908155Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:128: [controller 72075186224037888] SwitchToWork 2026-02-19T08:32:45.915753Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:171: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976715658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2061" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2026-02-19T08:32:45.915986Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:22: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976715658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2061" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2026-02-19T08:32:45.916070Z node 1 :REPLICATION_CONTROLLER NOTICE: tx_create_replication.cpp:43: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-02-19T08:32:45.917838Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:58: [controller 72075186224037888][TxCreateReplication] Complete 2026-02-19T08:32:45.917889Z node 1 :REPLICATION_CONTROLLER INFO: tx_create_replication.cpp:68: [controller 72075186224037888][TxCreateReplication] Discover tenant nodes: tenant# /Root 2026-02-19T08:32:45.920282Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:335: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2026-02-19T08:32:45.920349Z node 1 :REPLICATION_CONTROLLER DEBUG: controller.cpp:359: [controller 72075186224037888] Create session: nodeId# 1 2026-02-19T08:32:45.952958Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2026-02-19T08:32:45.953002Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 2026-02-19T08:32:45.953382Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2026-02-19T08:32:45.953475Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2026-02-19T08:32:45.953510Z node 1 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2026-02-19T08:32:45.953906Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete 2026-02-19T08:32:45.987650Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2026-02-19T08:32:45.987719Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2026-02-19T08:32:45.987768Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2026-02-19T08:32:45.987847Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:32:45.987892Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2026-02-19T08:32:45.988359Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-02-19T08:32:45.988708Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2026-02-19T08:32:45.988773Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-02-19T08:32:45.988821Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-02-19T08:32:45.989042Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2026-02-19T08:32:45.989066Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-02-19T08:32:45.989110Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-02-19T08:32:45.989408Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2026-02-19T08:32:45.989432Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-02-19T08:32:45.989739Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2026-02-19T08:32:45.989956Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2026-02-19T08:32:45.989990Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2026-02-19T08:32:45.990027Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: 
pending# 0, assigned# 2, allocated# 3, exhausted# 0 2026-02-19T08:32:45.990344Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2026-02-19T08:32:45.990386Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2026-02-19T08:32:45.990714Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, exhausted# 0 2026-02-19T08:32:45.990816Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:32:45.990845Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2026-02-19T08:32:45.990868Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2026-02-19T08:32:45.991329Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2026-02-19T08:32:45.991353Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2026-02-19T08:32:45.991599Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoDlq0Consumers >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TBackupCollectionTests::IncrementalBackupWithIndexes [GOOD] >> TBackupCollectionTests::OmitIndexesFlag >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] |95.4%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-02-19T08:32:47.443331Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:47.453949Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:47.455201Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:47.455283Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:47.455344Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:32:47.457162Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:258:2252], now have 1 active actors on pipe 2026-02-19T08:32:47.457312Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:47.478811Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:47.478969Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:47.484228Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:47.485198Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:47.485684Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:47.486927Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:266:2223] 2026-02-19T08:32:47.490509Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:47.490585Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-02-19T08:32:47.490627Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:266:2223] 2026-02-19T08:32:47.491423Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:47.491496Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:47.491534Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:47.491568Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:47.492819Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:47.492865Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:47.492907Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:47.492942Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928037][Partition][0][StateIdle] Try persist 2026-02-19T08:32:47.494127Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:47.494998Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:47.495520Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:269:2257], now have 1 active actors on pipe 2026-02-19T08:32:47.541836Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:47.545244Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:47.545532Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-02-19T08:32:47.545578Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:47.545621Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928137] doesn't have tx writes info 2026-02-19T08:32:47.546238Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-02-19T08:32:47.546345Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:47.548243Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:47.548478Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:47.549124Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928137] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:47.549242Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-02-19T08:32:47.549504Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:47.549733Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:407:2326] 2026-02-19T08:32:47.551232Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:47.551280Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-02-19T08:32:47.551318Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:407:2326] 2026-02-19T08:32:47.551374Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:47.551415Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:47.551447Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-02-19T08:32:47.551474Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:47.551528Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:47.551569Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:47.551595Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:47.551618Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928137][Partition][0][StateIdle] Try persist 2026-02-19T08:32:47.551675Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:47.551832Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:47.552212Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-02-19T08:32:47.571687Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:47.574790Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:47.575086Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-02-19T08:32:47.575142Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:47.575212Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928138] doesn't have tx writes info 2026-02-19T08:32:47.575917Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [2:459:2396], now have 1 active actors on pipe 2026-02-19T08:32:47.576084Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:47.578145Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-02-19T08:32:47.578250Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:47.578924Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928138] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions ... 28138][Partition][1][StateIdle] Try persist 2026-02-19T08:32:48.988196Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:48.988401Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-02-19T08:32:48.988880Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [4:472:2403], now have 1 active actors on pipe 2026-02-19T08:32:49.004114Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.007735Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.008050Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-02-19T08:32:49.008130Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.008187Z node 4 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:49.008895Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [4:521:2439], now have 1 active actors on pipe 2026-02-19T08:32:49.009022Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:49.011114Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:49.011254Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.011808Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928139] Config applied version 12 actor [4:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:49.011944Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:49.012250Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:49.012458Z node 4 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:529:2410] 2026-02-19T08:32:49.014529Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:49.014592Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:49.014645Z node 4 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:529:2410] 2026-02-19T08:32:49.014703Z node 4 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:49.014762Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:49.014805Z node 4 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:49.014844Z node 4 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:49.014883Z node 4 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.014920Z node 4 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:49.014963Z node 4 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.015000Z node 4 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:49.015091Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:49.015308Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:49.015780Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [4:532:2444], now have 1 active actors on pipe 2026-02-19T08:32:49.016902Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [4:538:2447], now have 1 active actors on pipe 2026-02-19T08:32:49.017149Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [4:539:2448], now have 1 active actors on pipe 2026-02-19T08:32:49.017372Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [4:540:2448], now have 1 active actors on pipe 2026-02-19T08:32:49.028439Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [4:548:2455], now have 1 active actors on pipe 2026-02-19T08:32:49.051829Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.053984Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.054781Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.054831Z node 4 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:49.054952Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:49.055228Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:49.055430Z node 4 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:604:2458] 2026-02-19T08:32:49.057287Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-02-19T08:32:49.058379Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-02-19T08:32:49.058629Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-02-19T08:32:49.058736Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. 
From m0000000002 to m0000000003 2026-02-19T08:32:49.058958Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-02-19T08:32:49.059031Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From d0000000002 to d0000000003 2026-02-19T08:32:49.059221Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-02-19T08:32:49.059265Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-02-19T08:32:49.059306Z node 4 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:32:49.059340Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-02-19T08:32:49.059442Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-02-19T08:32:49.059639Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-02-19T08:32:49.059685Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:49.059727Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:49.059773Z node 4 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:604:2458] 2026-02-19T08:32:49.059819Z node 4 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:49.059878Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:49.059920Z node 4 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:49.059954Z node 4 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:49.059990Z node 4 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.060027Z node 4 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:49.060068Z node 4 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.060104Z node 4 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:49.060185Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-02-19T08:32:49.060353Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:49.060907Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928138] server disconnected, pipe [4:539:2448] destroyed 2026-02-19T08:32:49.060967Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [4:538:2447] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TBackupCollectionTests::OmitIndexesFlag [GOOD] >> TBackupCollectionTests::IndexCdcStreamCountRotation >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-02-19T08:32:49.095349Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.099250Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.099549Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:49.099612Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.099683Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:32:49.100406Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:258:2252], now have 1 active actors on pipe 2026-02-19T08:32:49.100511Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:49.117403Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:49.117594Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.118412Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:49.118569Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:49.118981Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:49.119339Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:266:2223] 2026-02-19T08:32:49.121599Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:49.121659Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-02-19T08:32:49.121700Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:266:2223] 2026-02-19T08:32:49.121766Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:49.121834Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:49.121876Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:49.121913Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:49.121955Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.121992Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:49.122029Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.122066Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928037][Partition][0][StateIdle] Try persist 2026-02-19T08:32:49.122177Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:49.122410Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:49.122951Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:269:2257], now have 1 active actors on pipe 2026-02-19T08:32:49.171353Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.175883Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.176214Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-02-19T08:32:49.176271Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.176330Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:49.177032Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-02-19T08:32:49.177160Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:49.179456Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:49.179604Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.180375Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:49.180507Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-02-19T08:32:49.180818Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:49.181033Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:407:2326] 2026-02-19T08:32:49.183118Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:49.183175Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:49.183235Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:407:2326] 2026-02-19T08:32:49.183284Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:49.183337Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:49.183380Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:49.183432Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:49.183470Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.183504Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:49.183543Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.183573Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:49.183655Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-02-19T08:32:49.183912Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:49.184340Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-02-19T08:32:49.185538Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:416:2363], now have 1 active actors on pipe 2026-02-19T08:32:49.186766Z node 2 :PERSQUEUE DEBUG: partition.cpp:1031: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-02-19T08:32:49.187139Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2026-02-19T08:32:49.187520Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [2:416:2363] destroyed 2026-02-19T08:32:49.187652Z node 2 :PERSQUEUE DEBUG: partition.cpp:1031: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-02-19T08:32:49.188521Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928139] server disconnected, pipe [2:418:2364] destroyed 2026-02-19T08:32:49.749085Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.752703Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.752996Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:49.753055Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.753106Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026- ... itionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-02-19T08:32:49.871395Z node 3 :PERSQUEUE DEBUG: partition.cpp:1031: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-02-19T08:32:49.882497Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:547:2453], now have 1 active actors on pipe 2026-02-19T08:32:49.908948Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.910963Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.912219Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.912277Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:49.912387Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:49.912724Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:49.912970Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:603:2456] 2026-02-19T08:32:49.914902Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-02-19T08:32:49.916173Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-02-19T08:32:49.916481Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-02-19T08:32:49.916598Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From m0000000002 to m0000000003 2026-02-19T08:32:49.916876Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-02-19T08:32:49.916955Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From d0000000002 to d0000000003 2026-02-19T08:32:49.917179Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-02-19T08:32:49.917222Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-02-19T08:32:49.917268Z node 3 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:32:49.917308Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-02-19T08:32:49.917397Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-02-19T08:32:49.917663Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-02-19T08:32:49.917724Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:49.917764Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-02-19T08:32:49.917810Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:603:2456] 2026-02-19T08:32:49.917861Z node 3 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:49.917918Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:49.917960Z node 3 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:49.917996Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:49.918033Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.918069Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:49.918103Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.918138Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:49.918275Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:49.918458Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:49.919142Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928138] server disconnected, pipe [3:538:2446] destroyed 2026-02-19T08:32:49.919438Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [3:537:2445] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 
GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 78 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 78 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 92 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 92 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> KqpNamedExpressions::NamedExpressionRandomUpsertIndex+UseSink+UseDataQuery [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertReturning+UseSink+UseDataQuery >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> ScriptExecutionsTest::RunCheckLeaseStatus >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TableCreation::ConcurrentTableCreation [GOOD] >> 
TableCreation::ConcurrentTableCreationWithDifferentVersions >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::RollbackTableAcl >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-02-19T08:32:49.311743Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.315575Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.315924Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:49.316002Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.316073Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:32:49.316848Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:259:2253], now have 1 active actors on pipe 2026-02-19T08:32:49.316966Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:49.336391Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:49.336580Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.337385Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:49.337559Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:49.338057Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 
2026-02-19T08:32:49.338420Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:267:2224] 2026-02-19T08:32:49.340825Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:49.340898Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-02-19T08:32:49.340941Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2224] 2026-02-19T08:32:49.340998Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:49.341062Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:49.341116Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-02-19T08:32:49.341152Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:49.341198Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.341236Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:49.341275Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.341307Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928037][Partition][0][StateIdle] Try persist 2026-02-19T08:32:49.341424Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:49.341644Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:49.342174Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:270:2258], now have 1 active actors on pipe 2026-02-19T08:32:49.392199Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.396288Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.396633Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-02-19T08:32:49.396690Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.396766Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:49.397475Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:398:2354], now have 1 active actors on pipe 2026-02-19T08:32:49.397518Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:49.399870Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:49.400032Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.400752Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:49.400888Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:49.401189Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:49.401425Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:406:2325] 2026-02-19T08:32:49.403410Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:49.403464Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:49.403503Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:406:2325] 2026-02-19T08:32:49.403545Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:49.403596Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:49.403660Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:49.403699Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:49.403733Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.403764Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:49.403802Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:49.403837Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:49.403922Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:49.404133Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:49.404675Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:409:2359], now have 1 active actors on pipe 2026-02-19T08:32:49.405950Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:415:2362], now have 1 active actors on pipe 2026-02-19T08:32:49.406288Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:417:2363], now have 1 active actors on pipe 2026-02-19T08:32:49.406433Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [2:415:2362] destroyed 2026-02-19T08:32:49.406854Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928139] server disconnected, pipe [2:417:2363] destroyed 2026-02-19T08:32:49.961499Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:49.965083Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:49.965387Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:49.965450Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:49.965509Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:32:49.966284Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [3:259:2252], now have 1 active actors on pipe 2026-02-19T08:32:49.966358Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:49.968565Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:49. ... T08:32:50.073412Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. 
Count 0 2026-02-19T08:32:50.073647Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-02-19T08:32:50.074326Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [3:470:2400], now have 1 active actors on pipe 2026-02-19T08:32:50.093186Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:50.097902Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:50.098347Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-02-19T08:32:50.098420Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:50.098488Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:50.099329Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:519:2436], now have 1 active actors on pipe 2026-02-19T08:32:50.099452Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:50.102245Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:50.102380Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:50.103227Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928139] Config applied version 6 actor [3:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:50.103371Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:50.103680Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:50.103890Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:527:2407] 2026-02-19T08:32:50.105545Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:50.105615Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-02-19T08:32:50.105661Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:527:2407] 2026-02-19T08:32:50.105730Z node 3 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:50.105793Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:50.105830Z node 3 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:50.105858Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:50.105888Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:50.105918Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:50.105955Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:50.105986Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:50.106069Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:50.106285Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:50.106760Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:530:2441], now have 1 active actors on pipe 2026-02-19T08:32:50.107977Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [3:537:2445], now have 1 active actors on pipe 2026-02-19T08:32:50.108045Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [3:536:2444], now have 1 active actors on pipe 2026-02-19T08:32:50.108093Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:538:2445], now have 1 active actors on pipe 2026-02-19T08:32:50.119326Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:543:2449], now have 1 active actors on pipe 2026-02-19T08:32:50.153044Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:50.155558Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:50.156806Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:50.156870Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:50.157009Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:50.157369Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:50.157618Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:599:2452] 2026-02-19T08:32:50.159748Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-02-19T08:32:50.161138Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-02-19T08:32:50.161414Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-02-19T08:32:50.161543Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From m0000000002 to m0000000003 2026-02-19T08:32:50.161832Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-02-19T08:32:50.161924Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From d0000000002 to d0000000003 2026-02-19T08:32:50.162142Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-02-19T08:32:50.162200Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-02-19T08:32:50.162250Z node 3 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:32:50.162294Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-02-19T08:32:50.162409Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-02-19T08:32:50.162644Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-02-19T08:32:50.162692Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:50.162746Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-02-19T08:32:50.162824Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:599:2452] 2026-02-19T08:32:50.162887Z node 3 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:50.162960Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:50.163009Z node 3 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:50.163051Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:50.163094Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:50.163134Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:50.163199Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:50.163243Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:50.163343Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:50.163552Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:50.164184Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928138] server disconnected, pipe [3:537:2445] destroyed 2026-02-19T08:32:50.164457Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [3:536:2444] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 
2026-02-19T08:32:30.812781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:30.812851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.812947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:30.812992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:30.813016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:30.813043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:30.813071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:30.813094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:30.813150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.813363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:30.814044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:30.814283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:30.887319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:30.887398Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:30.898305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:30.898875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:30.899000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:30.907381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:30.908051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:30.908682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-02-19T08:32:30.909018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:30.912013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.912179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:30.913004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:30.913044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.913115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:30.913150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:30.913175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:30.913309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:31.029414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:31.030754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... ecute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:50.621212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:32:50.621271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000039, at schemeshard: 72057594046678944 2026-02-19T08:32:50.621572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 Propose -> ProposedWaitParts 2026-02-19T08:32:50.621761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-02-19T08:32:50.627737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:50.627792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-02-19T08:32:50.628064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:50.628122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:32:50.628556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:50.628613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:32:50.629560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 
38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:50.629669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:32:50.629750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:32:50.629808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-02-19T08:32:50.629868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:32:50.629966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:32:50.630639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1234 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:50.630695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:50.630850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1234 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:32:50.630988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1234 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-02-19T08:32:50.631740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:50.631788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:32:50.631929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 
72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:50.631994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:32:50.632091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 709 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:32:50.632161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:50.632210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:50.632254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:32:50.632328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:32:50.635627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:32:50.636098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:50.636225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:50.636510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:32:50.636559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:32:50.636667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:50.636704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:50.636742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:32:50.636789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:32:50.636831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:32:50.636903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2713] message: TxId: 102 2026-02-19T08:32:50.636956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 
2026-02-19T08:32:50.636990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:32:50.637021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:32:50.637156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:32:50.638991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-02-19T08:32:50.639044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1014:2965] TestWaitNotification: OK eventTxId 102 2026-02-19T08:32:50.639465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:50.639533Z node 1 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__conditional_erase.cpp:422: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2026-02-19T08:32:50.641344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:50.641504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:32:50.641591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTable, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], shardIdx: 72057594046678944:1, tabletId: 72075186233409546, last: now - 0.001000s, next: now + 299.999000s, now: 1970-01-01T00:01:00.047500Z, at schemeshard: 72057594046678944 2026-02-19T08:32:50.641644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::TableCreationWithAcl >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-02-19T08:32:50.952872Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:50.956615Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:50.956929Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:50.956996Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:50.957066Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:32:50.957931Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [3:261:2254], now have 1 active actors on pipe 2026-02-19T08:32:50.958023Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:50.982123Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:50.982286Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:50.983146Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928037] Config applied version 1 actor [3:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:50.983360Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:50.983782Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:50.984113Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [3:269:2225] 2026-02-19T08:32:50.986543Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:50.986603Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-02-19T08:32:50.986647Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:269:2225] 2026-02-19T08:32:50.986702Z node 3 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:50.986771Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:50.986828Z node 3 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:50.986865Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:50.986910Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:50.986943Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:50.986983Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:50.987018Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928037][Partition][0][StateIdle] Try persist 2026-02-19T08:32:50.987167Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:50.987418Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:50.987887Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [3:272:2259], now have 1 active actors on pipe 2026-02-19T08:32:51.032239Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.035632Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.035921Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-02-19T08:32:51.035978Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.036064Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928137] doesn't have tx writes info 2026-02-19T08:32:51.036781Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [3:399:2354], now have 1 active actors on pipe 2026-02-19T08:32:51.036894Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.038987Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.039137Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.039844Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928137] Config applied version 2 actor [3:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.039951Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-02-19T08:32:51.040245Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.040497Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [3:407:2325] 2026-02-19T08:32:51.042563Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.042627Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-02-19T08:32:51.042668Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:407:2325] 2026-02-19T08:32:51.042719Z node 3 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.042773Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.042896Z node 3 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-02-19T08:32:51.042934Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:51.042971Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.043003Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.043043Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.043074Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928137][Partition][0][StateIdle] Try persist 2026-02-19T08:32:51.043155Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:51.043397Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.043933Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [3:410:2359], now have 1 active actors on pipe 2026-02-19T08:32:51.061046Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.064074Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.064382Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-02-19T08:32:51.064439Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.064498Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928138] doesn't have tx writes info 2026-02-19T08:32:51.065309Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [3:459:2395], now have 1 active actors on pipe 2026-02-19T08:32:51.065416Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.067616Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-02-19T08:32:51.067731Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.068563Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928138] Config applied version 3 actor [3:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInfli ... 27626Z node 4 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928138][Partition][1][StateIdle] Try persist 2026-02-19T08:32:51.727720Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:51.727957Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.728416Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [4:471:2401], now have 1 active actors on pipe 2026-02-19T08:32:51.745212Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.748415Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.748745Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-02-19T08:32:51.748827Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.748891Z node 4 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:51.749668Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [4:520:2437], now have 1 active actors on pipe 2026-02-19T08:32:51.749811Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.752072Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:51.752208Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.753142Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928139] Config applied version 8 actor [4:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:51.753300Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:51.753668Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.753946Z node 4 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:528:2408] 2026-02-19T08:32:51.756015Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.756079Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:51.756135Z node 4 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:528:2408] 2026-02-19T08:32:51.756190Z node 4 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.756255Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.756297Z node 4 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:51.756359Z node 4 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:51.756403Z node 4 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.756439Z node 4 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.756480Z node 4 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.756515Z node 4 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:51.756609Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:51.756836Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.757334Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [4:531:2442], now have 1 active actors on pipe 2026-02-19T08:32:51.758668Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [4:537:2445], now have 1 active actors on pipe 2026-02-19T08:32:51.758762Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [4:538:2446], now have 1 active actors on pipe 2026-02-19T08:32:51.758884Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [4:539:2446], now have 1 active actors on pipe 2026-02-19T08:32:51.770578Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [4:544:2450], now have 1 active actors on pipe 2026-02-19T08:32:51.805450Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.807880Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.808902Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.808968Z node 4 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:51.809093Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:51.809354Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.809570Z node 4 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:600:2453] 2026-02-19T08:32:51.810996Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-02-19T08:32:51.811989Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-02-19T08:32:51.812291Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-02-19T08:32:51.812411Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. 
From m0000000002 to m0000000003 2026-02-19T08:32:51.812624Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-02-19T08:32:51.812691Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From d0000000002 to d0000000003 2026-02-19T08:32:51.812872Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-02-19T08:32:51.812907Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-02-19T08:32:51.812939Z node 4 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:32:51.812970Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-02-19T08:32:51.813054Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-02-19T08:32:51.813299Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-02-19T08:32:51.813340Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.813385Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:51.813429Z node 4 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:600:2453] 2026-02-19T08:32:51.813472Z node 4 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.813523Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.813570Z node 4 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:51.813603Z node 4 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:51.813645Z node 4 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.813691Z node 4 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.813751Z node 4 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.813790Z node 4 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:51.813879Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-02-19T08:32:51.814070Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.814909Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928138] server disconnected, pipe [4:538:2446] destroyed 2026-02-19T08:32:51.814977Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [4:537:2445] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-02-19T08:32:51.198720Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.202181Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.202505Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:51.202569Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.202629Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:32:51.203335Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:259:2253], now have 1 active actors on pipe 2026-02-19T08:32:51.203441Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.221387Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.221544Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.222264Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.222412Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:51.222814Z node 2 
:PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.223066Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:267:2224] 2026-02-19T08:32:51.224956Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.225012Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-02-19T08:32:51.225053Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2224] 2026-02-19T08:32:51.225099Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.225159Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.225198Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-02-19T08:32:51.225230Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:51.225274Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.225305Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.225338Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.225373Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928037][Partition][0][StateIdle] Try persist 2026-02-19T08:32:51.225483Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:51.225723Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.226228Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:270:2258], now have 1 active actors on pipe 2026-02-19T08:32:51.270884Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.274618Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.274896Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-02-19T08:32:51.274949Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.275004Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928138] doesn't have tx writes info 2026-02-19T08:32:51.275716Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [2:397:2353], now have 1 active actors on pipe 2026-02-19T08:32:51.275819Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.278106Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-02-19T08:32:51.278209Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.278903Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928138] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-02-19T08:32:51.279021Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:51.279309Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.279512Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:405:2324] 2026-02-19T08:32:51.281446Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.281498Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2026-02-19T08:32:51.281540Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:405:2324] 2026-02-19T08:32:51.281579Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.281628Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928138][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.281662Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928138][Partition][1][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:51.281729Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928138][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:32:51.281765Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.281794Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928138][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.281827Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.281860Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928138][Partition][1][StateIdle] Try persist 2026-02-19T08:32:51.281937Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:51.282132Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.282567Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [2:408:2358], now have 1 active actors on pipe 2026-02-19T08:32:51.297804Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.300609Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.300890Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-02-19T08:32:51.300944Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.300995Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:51.301689Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:457:2394], now have 1 active actors on pipe 2026-02-19T08:32:51.301840Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.306067Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:51.306173Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.306835Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928139] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:51.306972Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-02-19T08:32:51.307277Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.307483Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:465:2365] 2026-02-19T08:32:51.309220Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.309272Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:51.309310Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:465:2365] 2026-02-19T08:32:51.309353Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.309401Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.309442Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:51.309474Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:51.309527Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.309559Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.309591Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.309622Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:51.309694Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-02-19T08:32:51.309903Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.310362Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:468:2399], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2026-02-19T08:32:51.318479Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:475:2402], now have 1 active actors on pipe 2026-02-19T08:32:51.318945Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [2:478:2403], now have 1 active actors on pipe 2026-02-19T08:32:51.319345Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:479:2403], now have 1 active actors on pipe 2026-02-19T08:32:51.321120Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [2:475:2402] destroyed 2026-02-19T08:32:51.321807Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928138] server disconnected, pipe [2:478:2403] destroyed 2026-02-19T08:32:51.321913Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928139] server disconnected, pipe [2:479:2403] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-02-19T08:32:51.136265Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.140234Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.140525Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:51.140593Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.140656Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:32:51.141447Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:259:2253], now have 1 active actors on pipe 2026-02-19T08:32:51.141566Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.161780Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.161958Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.162618Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.162751Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:51.163121Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.163412Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:267:2224] 2026-02-19T08:32:51.165509Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.165567Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-02-19T08:32:51.165602Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2224] 2026-02-19T08:32:51.165641Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.165701Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.165752Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:51.165782Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:51.165830Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.165870Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.165911Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.165946Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928037][Partition][0][StateIdle] Try persist 2026-02-19T08:32:51.166049Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:51.166295Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.166805Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:270:2258], now have 1 active actors on pipe 2026-02-19T08:32:51.223676Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.227806Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.228159Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-02-19T08:32:51.228251Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.228327Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:51.229134Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:400:2356], now have 1 active actors on pipe 2026-02-19T08:32:51.229198Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.231655Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:51.231802Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.232626Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:51.232800Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-02-19T08:32:51.233161Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.233412Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:408:2327] 2026-02-19T08:32:51.235640Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.235701Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:51.235753Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:408:2327] 2026-02-19T08:32:51.235818Z node 2 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.235891Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.235932Z node 2 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:51.235964Z node 2 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:51.236001Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.236036Z node 2 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.236072Z node 2 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.236111Z node 2 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:51.236196Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-02-19T08:32:51.236444Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.236988Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:411:2361], now have 1 active actors on pipe 2026-02-19T08:32:51.238328Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [2:417:2364], now have 1 active actors on pipe 2026-02-19T08:32:51.238825Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [2:419:2365], now have 1 active actors on pipe 2026-02-19T08:32:51.239139Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [2:417:2364] destroyed 2026-02-19T08:32:51.239629Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928139] server disconnected, pipe [2:419:2365] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2026-02-19T08:32:51.128957Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.133292Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.133629Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-02-19T08:32:51.133702Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.133792Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928037] doesn't have tx writes info 2026-02-19T08:32:51.134510Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [1:261:2254], now have 1 active actors on pipe 2026-02-19T08:32:51.134599Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.166878Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.167076Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.168368Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928037] Config applied version 1 actor [1:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions 
{ PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.168591Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:51.169023Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.169416Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [1:269:2225] 2026-02-19T08:32:51.172098Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.172164Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-02-19T08:32:51.172215Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:269:2225] 2026-02-19T08:32:51.172270Z node 1 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.172351Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.172407Z node 1 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-02-19T08:32:51.172460Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:51.172513Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.172551Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.172589Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.172627Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928037][Partition][0][StateIdle] Try persist 2026-02-19T08:32:51.172746Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:51.173031Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.173611Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [1:272:2259], now have 1 active actors on pipe 2026-02-19T08:32:51.237880Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.240969Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928137] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.241194Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-02-19T08:32:51.241265Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.241321Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928137] doesn't have tx writes info 2026-02-19T08:32:51.242014Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [1:402:2357], now have 1 active actors on pipe 2026-02-19T08:32:51.242148Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.246701Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.246843Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.247706Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928137] Config applied version 2 actor [1:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-02-19T08:32:51.247846Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:51.248200Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:51.248467Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [1:410:2328] 2026-02-19T08:32:51.250392Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:51.250440Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-02-19T08:32:51.250474Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:410:2328] 2026-02-19T08:32:51.250520Z node 1 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:51.250573Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:51.250614Z node 1 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928137][Partition][0][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:51.250644Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:32:51.250670Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.250707Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:51.250739Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:51.250770Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928137][Partition][0][StateIdle] Try persist 2026-02-19T08:32:51.250858Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:51.251028Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:32:51.251421Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [1:413:2362], now have 1 active actors on pipe 2026-02-19T08:32:51.266940Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:51.270634Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:51.271085Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-02-19T08:32:51.271138Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.271209Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928138] doesn't have tx writes info 2026-02-19T08:32:51.271898Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [1:462:2398], now have 1 active actors on pipe 2026-02-19T08:32:51.271998Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:51.274725Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-02-19T08:32:51.274845Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:51.275692Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928138] Config applied version 3 actor [1:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-02-19T08:32:51.275816Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initial ... 
00, To tx_18446744073709551615 2026-02-19T08:32:52.789810Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:789: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-02-19T08:32:52.789885Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:52.789961Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:52.790730Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:519:2436], now have 1 active actors on pipe 2026-02-19T08:32:52.790860Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1292: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-02-19T08:32:52.793184Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:52.793315Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:52.793932Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037928139] Config applied version 12 actor [3:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-02-19T08:32:52.794074Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:52.794387Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:52.794620Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:527:2407] 2026-02-19T08:32:52.796577Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:52.796637Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-02-19T08:32:52.796686Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:527:2407] 2026-02-19T08:32:52.796742Z node 3 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:52.796802Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:52.796848Z node 3 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-02-19T08:32:52.796889Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:52.796932Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:52.796970Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:52.797017Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:52.797055Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:52.797140Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:52.797347Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:52.797770Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:530:2441], now have 1 active actors on pipe 2026-02-19T08:32:52.799198Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928037] server connected, pipe [3:537:2444], now have 1 active actors on pipe 2026-02-19T08:32:52.799654Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928137] server connected, pipe [3:539:2445], now have 1 active actors on pipe 2026-02-19T08:32:52.799742Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928138] server connected, pipe [3:540:2445], now have 1 active actors on pipe 2026-02-19T08:32:52.799858Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:541:2445], now have 1 active actors on pipe 2026-02-19T08:32:52.800459Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2807: [PQ: 72057594037928139] server connected, pipe [3:554:2456], now have 1 active actors on pipe 2026-02-19T08:32:52.821655Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3046: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-02-19T08:32:52.823769Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3078: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-02-19T08:32:52.824645Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:32:52.824702Z node 3 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037928139] doesn't have tx writes info 2026-02-19T08:32:52.824822Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:32:52.825102Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:32:52.825306Z node 3 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:610:2459] 2026-02-19T08:32:52.827445Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-02-19T08:32:52.828599Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-02-19T08:32:52.828842Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-02-19T08:32:52.828959Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From m0000000002 to m0000000003 2026-02-19T08:32:52.829241Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-02-19T08:32:52.829327Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From d0000000002 to d0000000003 2026-02-19T08:32:52.829522Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-02-19T08:32:52.829567Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-02-19T08:32:52.829614Z node 3 :PERSQUEUE INFO: partition_init.cpp:1037: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-02-19T08:32:52.829658Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-02-19T08:32:52.829787Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1516: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-02-19T08:32:52.830007Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-02-19T08:32:52.830056Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:32:52.830100Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-02-19T08:32:52.830148Z node 3 :PERSQUEUE INFO: partition.cpp:713: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:610:2459] 2026-02-19T08:32:52.830205Z node 3 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:32:52.830265Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:32:52.830316Z node 3 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-02-19T08:32:52.830352Z node 3 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-02-19T08:32:52.830390Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:52.830429Z node 3 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-02-19T08:32:52.830469Z node 3 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:32:52.830509Z node 3 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037928139][Partition][2][StateIdle] Try persist 2026-02-19T08:32:52.830594Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:32:52.830752Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-02-19T08:32:52.831567Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928037] server disconnected, pipe [3:537:2444] destroyed 2026-02-19T08:32:52.831622Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928137] server disconnected, pipe [3:539:2445] destroyed 2026-02-19T08:32:52.831696Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2832: [PQ: 72057594037928138] server disconnected, pipe [3:540:2445] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has 
unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoDlq0Consumers [GOOD] >> KqpExtractPredicateLookup::ComplexRange [GOOD] >> KqpExtractPredicateLookup::PointJoin >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue [GOOD] |95.5%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNamedExpressions::NamedExpressionRandomInsertDataQuery+UseSink [GOOD] >> KqpQueryService::StreamExecuteQueryPure >> KqpQueryService::DdlUser >> KqpQueryService::ShowCreateTableNotSuccess >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoWithConsumersRetentionExtended >> KqpQueryService::CreateTempTable >> KqpQueryServiceScripts::ValidateScript >> KqpDocumentApi::RestrictWriteExplicitPrepare >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery >> KqpService::Shutdown >> KqpQueryService::DdlGroup >> KqpQueryService::ShowCreateTable >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> KqpQueryService::TableSink_OltpUpsert >> TBackupCollectionTests::IndexCdcStreamCountRotation [GOOD] >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams >> KqpQueryService::SessionFromPoolError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNamedExpressions::NamedExpressionRandomInsertDataQuery+UseSink [GOOD] Test command err: 2026-02-19T08:29:55.287494Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490736110887431:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:55.288013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:55.486520Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:55.500999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:55.501105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:55.559400Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:55.560497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:55.561262Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490736110887390:2081] 1771489795285677 != 1771489795285680 2026-02-19T08:29:55.655376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2026-02-19T08:29:55.655399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:29:55.655409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:55.655509Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:55.778185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:56.044224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:56.049994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:29:56.101785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:56.229964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:56.326671Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:56.369157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:56.436988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.160857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490748995791156:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.160973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.161389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490748995791166:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.161476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.414725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.442578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.469893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.498312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.526739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.557848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.615987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.666117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:58.726129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490748995792038:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.726213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.726321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490748995792043:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.726382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490748995792045:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.726429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:58.729906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:58.739669Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490748995792047:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:58.842079Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490748995792098:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:30:00.287625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490736110887431:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:30:00.287716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [[%true]] [[["52358474-d373-49b9-b069-df650ee2e0e1"]];[["b134dce1-7bab-454b-bf2b-9a342942de95"]]] [[["52358474-d373-49b9-b069-df650ee2e0e1"]];[["b134dce1-7bab-454b-bf2b-9a342942de95"]]] 2026-02-19T08:30:02.514909Z no ... ;actor=TTableExistsActor;event=undelivered;self_id=[18:7608491460395970792:2175];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:43.739468Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:43.795215Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:43.888842Z node 18 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:43.890345Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:43.890456Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:43.892407Z node 18 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [18:7608491460395970629:2081] 1771489963729161 != 1771489963729164 2026-02-19T08:32:43.914522Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:44.030544Z node 18 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:44.030578Z node 18 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:44.030593Z node 18 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:44.030695Z node 18 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:44.088689Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:44.528606Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:44.535883Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:44.546944Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:44.641182Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:44.888824Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:44.962800Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:45.068846Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:48.739214Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7608491460395970792:2175];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:48.739306Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:49.299603Z node 18 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7608491486165776287:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.299763Z node 18 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.300086Z node 18 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7608491486165776296:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.300168Z node 18 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.405965Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.448724Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.488869Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.527743Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.569794Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.617454Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.660183Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.721783Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.846153Z node 18 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7608491486165777171:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.846290Z node 18 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.849984Z node 18 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7608491486165777176:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.849996Z node 18 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7608491486165777177:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.850126Z node 18 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.855053Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:49.867959Z node 18 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [18:7608491486165777180:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:32:49.943979Z node 18 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [18:7608491486165777241:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [["3f78b7eb-9c3e-443f-ae10-70ea62964b73"];["7a7daadc-cf09-485e-8adb-bbc3a15745a1"]] [[["5ed55aab-f224-458b-bfbf-77c4401db646"]];[["ea6aee1f-258a-464e-8d83-c95fe535c755"]]] [[["9351dfab-6ae9-4638-a266-0586e20c7cef"]];[["ed4150cf-92f8-44fb-8e45-cd15e62b304b"]]] >> KqpProxy::NodeDisconnectedTest [GOOD] >> KqpProxy::PingNotExistedSession >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TableCreation::ConcurrentUpdateTable >> TableCreation::RollbackTableAcl [GOOD] >> TableCreation::CreateOldTable >> KqpQueryServiceScripts::ParseScript ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:30.523549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:30.523619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.523692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:30.523732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:30.523780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:30.523814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:30.523842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:30.523867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:30.523936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:30.523995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:30.524812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:30.525065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:30.610066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:30.610185Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:30.621705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:30.622703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:30.622917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:30.633961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:30.636676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:30.637377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:30.637658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:30.641031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.641238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:30.642094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:30.642150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:30.642259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:30.642319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:30.642363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:30.642527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:30.785022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:32:30.785891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:30.786812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
umnShard::TEvNotifyTxCompletionResult> complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.444856Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.444946Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.445037Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.446134Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.446229Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.446299Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.446366Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.446430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.446495Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.446553Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.446617Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.449466Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.449568Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.449632Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.449701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.449783Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.449851Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.449962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:32:57.450019Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:32:57.450148Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: 
Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:57.450191Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:57.450241Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:32:57.450282Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:57.450322Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-02-19T08:32:57.450401Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:3113:4331] message: TxId: 101 2026-02-19T08:32:57.450456Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:32:57.450528Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:32:57.450563Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:32:57.451650Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-02-19T08:32:57.454606Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:32:57.454682Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:3114:4332] TestWaitNotification: OK eventTxId 101 2026-02-19T08:32:57.455251Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:32:57.455537Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 301us result status StatusSuccess 2026-02-19T08:32:57.456153Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 
72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } IsRestore: false } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TableCreation::TableCreationWithAcl [GOOD] >> TableCreation::SimpleUpdateTable >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: 2026-02-19T08:31:41.135278Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491194084842745:2149];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:31:41.135644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:31:41.437492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:31:41.437595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:31:41.517035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:31:41.523256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:31:41.583598Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:41.588056Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491194084842629:2081] 1771489901053130 != 1771489901053133 2026-02-19T08:31:41.686506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:31:41.686528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:31:41.686538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:31:41.686609Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:31:41.703989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:31:42.071840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:31:42.134149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:31:42.139251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:31:44.632307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491206969745818:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.632308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491206969745830:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.632387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.632629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491206969745833:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.632695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:31:44.635872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:31:44.644564Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491206969745832:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:31:44.730887Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491206969745885:2814] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:31:45.100119Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576; 2026-02-19T08:31:45.100178Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted 2026-02-19T08:31:45.116313Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:738: SelfId: [1:7608491211264713213:2360], TxId: 281474976710661, task: 1. Ctx: { CheckpointId : . TraceId : 01khtgdh28a8nc5tzytgmednk8. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmY5MTJjMDgtNzEwZjU1YjUtYTg2OTk5ZWEtNWQ0ZmVhYTQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-fkrokuvqke, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-02-19T08:31:45.035994Z }, code: 2029 }. 2026-02-19T08:31:45.158078Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037888 Cancelled read: {[1:7608491211264713214:2360], 0} 2026-02-19T08:31:45.158133Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037889 Cancelled read: {[1:7608491211264713214:2360], 1} 2026-02-19T08:31:45.158165Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037890 Cancelled read: {[1:7608491211264713214:2360], 2} 2026-02-19T08:31:45.158202Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037891 Cancelled read: {[1:7608491211264713214:2360], 3} 2026-02-19T08:31:45.158234Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037892 Cancelled read: {[1:7608491211264713214:2360], 4} 2026-02-19T08:31:45.158268Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037893 Cancelled read: {[1:7608491211264713214:2360], 5} 2026-02-19T08:31:45.158299Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037894 Cancelled read: {[1:7608491211264713214:2360], 6} 2026-02-19T08:31:45.158336Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3755: 72075186224037895 Cancelled read: {[1:7608491211264713214:2360], 7} 2026-02-19T08:31:45.159673Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=NmY5MTJjMDgtNzEwZjU1YjUtYTg2OTk5ZWEtNWQ0ZmVhYTQ=, ActorId: [1:7608491206969745816:2360], ActorState: ExecuteState, LegacyTraceId: 01khtgdh28a8nc5tzytgmednk8, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-fkrokuvqke, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-02-19T08:31:45.035994Z }\n" issue_code: 2029 severity: 1 } trace_id#
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-fkrokuvqke, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-02-19T08:31:45.035994Z } , code: 2029 query_phases { duration_us: 130593 table_access { name: "/Root/LargeTable" partitions_count: 8 } cpu_time_us: 1632 affected_shards: 8 } compilation { duration_us: 291596 cpu_time_us: 284353 } process_cpu_time_us: 775 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048586,\"Max\":1048586,\"Min\":1048586,\"History\":[40,1048586]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/LargeTable\"}],\"BaseTimeMs\":1771489905116,\"CpuTimeUs\":{\"Count\":1,\"Sum\":1632,\"Max\":1632,\"Min\":1632,\"History\":[40,1632]},\"Ingress\":[{\"Pop\":{},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":291596,\"CpuTimeUs\":284353},\"ProcessCpuTimeUs\":775,\"TotalDurationUs\":528949,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":101147},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"Pl ... 
2026-02-19T08:32:45.354308Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:45.443799Z node 5 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:45.445841Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7608491466416864758:2081] 1771489965330848 != 1771489965330851 2026-02-19T08:32:45.470898Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:45.471017Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:45.494087Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:45.563124Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:45.586102Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:45.586129Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:45.586157Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:45.586261Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:46.097855Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:46.115989Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.193995Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.341281Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:46.378822Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.461926Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.372031Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491483596735820:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.372154Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.372503Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491483596735830:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.372648Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.437535Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.472914Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.506960Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.541171Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.577078Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.614381Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.656224Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.700519Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:49.770571Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491483596736698:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.770695Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.770834Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491483596736703:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.771015Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7608491483596736705:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.771071Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:49.774529Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:49.788011Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7608491483596736706:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:32:49.874552Z node 5 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [5:7608491483596736758:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:50.332643Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7608491466416864787:2069];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:50.332723Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:32:51.679361Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:54.685835Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=5&id=ZmM3ZGNiZGYtZjMxOTkyZGEtNGRiMmU5NmMtNmJmYmE2ZWE=, ActorId: [5:7608491492186671662:2538], ActorState: ExecuteState, LegacyTraceId: 01khtgfp2h8dwtcwg31fmsy3zy, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Query result size limit exceeded. (51202542 > 50331648)" issue_code: 2013 severity: 1 } trace_id# >> KqpQueryService::TableSink_OltpUpsert [GOOD] >> KqpQueryService::TableSink_OltpUpdate >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> ScriptExecutionsTest::RestartQueryWithGetOperation [GOOD] >> ScriptExecutionsTest::BackgroundOperationRestart >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoWithConsumersRetentionExtended [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection >> KqpService::SessionBusy >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] >> KqpQueryService::ShowCreateTableNotSuccess [GOOD] >> KqpQueryService::ShowCreateTableOnView >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> KqpQueryService::StreamExecuteQueryPure [GOOD] >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable >> KqpQueryService::StreamExecuteQueryMultiResult >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpDocumentApi::Scripting >> KqpQueryServiceScripts::ValidateScript [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows >> KqpQueryService::ShowCreateTable [GOOD] >> KqpQueryService::ShowCreateTableDisable >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlTx >> TSchemeShardTTLTests::CheckCounters [GOOD] >> KqpQueryService::CreateTempTable [GOOD] >> KqpQueryService::CreateTempTableDisabled >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> KqpQueryService::SessionFromPoolError [GOOD] >> KqpQueryService::SessionFromPoolSuccess >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoWithConsumersRetentionShrinked >> KqpQueryService::DdlGroup [GOOD] >> KqpQueryService::DdlPermission >> 
TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityBasic [GOOD] >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery [GOOD] >> KqpQueryService::ClosedSessionRemovedFromPool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:37.172297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:37.172374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:37.172445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:37.172483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:37.172562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:37.172599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:37.172652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:37.172677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:37.172740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:37.172799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:37.173565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:37.173870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:37.234115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:37.234191Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:37.245564Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:37.246176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:37.246382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:37.254862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:37.255508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:37.256035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:37.256271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:37.258740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:37.258925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:37.259839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:37.259882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:37.259955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:37.259992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:37.260017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:37.260124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:37.375119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.375799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.375893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.375966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-02-19T08:32:37.376425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:37.376638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
5Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:33:02.189110Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:33:02.189132Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:33:02.189155Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:33:02.189180Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:33:02.189233Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-02-19T08:33:02.189843Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1068 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:33:02.189890Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:33:02.190025Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1068 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:33:02.190151Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1068 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-02-19T08:33:02.190689Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 77309414029 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:33:02.190730Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:33:02.190834Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 716 RawX2: 77309414029 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:33:02.190887Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:33:02.190972Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 716 RawX2: 77309414029 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:33:02.191040Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:02.191082Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.191123Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:33:02.191190Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:33:02.194193Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:33:02.195491Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:33:02.195628Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.195732Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.195997Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.196048Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:33:02.196145Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:33:02.196180Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:33:02.196223Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:33:02.196256Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:33:02.196295Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready 
parts: 1/1, is published: true 2026-02-19T08:33:02.196356Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:742:2719] message: TxId: 101 2026-02-19T08:33:02.196407Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:33:02.196454Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:33:02.196490Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:33:02.196605Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:33:02.199320Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:33:02.199369Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:743:2720] TestWaitNotification: OK eventTxId 101 2026-02-19T08:33:02.199818Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:33:02.200012Z node 18 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 226us result status StatusSuccess 2026-02-19T08:33:02.200493Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.639176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.639291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.639395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.639442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.639494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.639539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.639566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.639591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.639673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-02-19T08:32:28.639741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.640558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.641661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.734228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.734314Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.745783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.746372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.746539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.757499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.758169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.758798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.759026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.763532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.765182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.771253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.771462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.771504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.771556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.773259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.942159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.943935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.944021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.944109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.944185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.944276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.944389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
HEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.022622Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-02-19T08:33:02.022694Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:33:02.022765Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:33:02.024905Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.024987Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-02-19T08:33:02.025077Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:33:02.030485Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.030544Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.030641Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:33:02.030731Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:33:02.030957Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:33:02.032807Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:33:02.033044Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-02-19T08:33:02.033599Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:02.033857Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 115964119152 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:33:02.033955Z node 27 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:33:02.034376Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:33:02.034471Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-02-19T08:33:02.034804Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:33:02.034931Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-02-19T08:33:02.037450Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:33:02.037548Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:33:02.037897Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:33:02.037985Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [27:212:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-02-19T08:33:02.038573Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.038674Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1:0 ProgressState 2026-02-19T08:33:02.038929Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:33:02.039001Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:33:02.039079Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:33:02.039162Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:33:02.039239Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:33:02.039330Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:33:02.039403Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:33:02.039466Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:33:02.039582Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 
2026-02-19T08:33:02.039658Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-02-19T08:33:02.039727Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-02-19T08:33:02.040476Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:33:02.040654Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-02-19T08:33:02.040726Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-02-19T08:33:02.040804Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-02-19T08:33:02.040889Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-02-19T08:33:02.041038Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-02-19T08:33:02.047068Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-02-19T08:33:02.047892Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-02-19T08:33:02.049332Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [27:671:2660] Bootstrap 2026-02-19T08:33:02.051680Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [27:671:2660] Become StateWork (SchemeCache [27:676:2665]) 2026-02-19T08:33:02.055749Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:33:02.056303Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:02.056468Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 
2026-02-19T08:33:02.057115Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2026-02-19T08:33:02.058674Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [27:671:2660] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-02-19T08:33:02.066701Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:33:02.067211Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-02-19T08:33:02.068100Z node 27 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> KqpQueryService::AlterTempTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:127:2151] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:34.242621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:34.242690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.242748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:34.242782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:34.242808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:34.242847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:34.242878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:34.242903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 
2026-02-19T08:32:34.242960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:34.243023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:34.243867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:34.244079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:34.338030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:34.338092Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:34.352680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:34.353117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:34.353332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:34.361354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:34.361666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:34.362487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:34.362812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:34.365726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.365943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:34.367119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:34.367190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:34.367324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:34.367372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:34.367414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:34.367524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:34.528913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.529952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.530919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:34.531010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 2814749 ... 
T_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:33:02.219266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-02-19T08:33:02.219991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-02-19T08:33:02.220037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:33:02.220091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 TEST 13 2026-02-19T08:33:02.326063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7156: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-02-19T08:33:02.326183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:33:02.326293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLEnabledTableMoved, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], shardIdx: 72057594046678944:3, tabletId: 72075186233409548, last: now - 3610.027000s, next: now + 0.000000s, now: 2026-02-19T12:14:05.277299Z, at schemeshard: 72057594046678944 2026-02-19T08:33:02.326403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:189: TTxRunConditionalErase DoExecuteOnShard, table: /MyRoot/TTLEnabledTableMoved, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], shardIdx: 72057594046678944:4, tabletId: 72075186233409549, last: now - 3610.027000s, next: now + 0.000000s, now: 2026-02-19T12:14:05.277299Z, at schemeshard: 72057594046678944 2026-02-19T08:33:02.326473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:141: TTxRunConditionalErase: no more scheduled shards, path: /MyRoot/TTLEnabledTableMoved, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-02-19T08:33:02.326555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:33:02.326718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409549, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1771503245277299 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 
2026-02-19T08:33:02.326833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:251: Run conditional erase, tabletId: 72075186233409548, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1771503245277299 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-02-19T08:33:02.327474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-02-19T08:33:02.327808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7213: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:33:02.328403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:02.328467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-02-19T08:33:02.329121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:451: TTxScheduleConditionalErase Execute: responses: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:02.329161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:408: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-02-19T08:33:02.338668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:02.338833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:33:02.338926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTableMoved, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], shardIdx: 72057594046678944:3, tabletId: 72075186233409548, last: now - 0.002000s, next: now + 3599.998000s, now: 2026-02-19T12:14:05.279299Z, at schemeshard: 72057594046678944 2026-02-19T08:33:02.339279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:553: TTxScheduleConditionalErase Complete: affected tables: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:02.339357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-02-19T08:33:02.339411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:63: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-02-19T08:33:02.339471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:178: Skip conditional erase, table: /MyRoot/TTLEnabledTableMoved, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], shardIdx: 72057594046678944:3, tabletId: 72075186233409548, last: now - 0.002000s, next: now + 3599.998000s, now: 2026-02-19T12:14:05.279299Z, at schemeshard: 72057594046678944 2026-02-19T08:33:02.339517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:228: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 TEST 13.2 2026-02-19T08:33:02.361567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, 
queue size# 0 2026-02-19T08:33:02.416808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2026-02-19T08:33:02.416941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-02-19T08:33:02.417028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-02-19T08:33:02.417116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:33:02.417233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-02-19T08:33:02.417367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-02-19T08:33:02.417403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:33:02.417470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable TEST 13.3 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 TEST 14 2026-02-19T08:33:02.448191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:787: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-02-19T08:33:02.502789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-02-19T08:33:02.503128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-02-19T08:33:02.503225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:33:02.503334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409549: 
SplitByLoadNotEnabledForTable 2026-02-19T08:33:02.503536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:760: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-02-19T08:33:02.503633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-02-19T08:33:02.503674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-02-19T08:33:02.503737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:615: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 TEST 15 >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable [GOOD] >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityExtendTimeout >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams [GOOD] >> TBackupCollectionTests::InitCopyTableSourceDroppedSurvives >> KqpService::Shutdown [GOOD] >> KqpService::SessionBusyRetryOperation >> KqpQueryServiceScripts::ExecuteScriptStatsBasic >> KqpQueryServiceScripts::ParseScript [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions >> KqpService::CloseSessionAbortQueryExecution >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery >> KqpDocumentApi::RestrictWrite >> KqpQueryService::TableSink_OltpUpdate [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink >> TableCreation::CreateOldTable [GOOD] >> KqpQueryService::CreateTempTableDisabled [GOOD] >> KqpQueryService::CreateAndDropTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:57.884992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:57.885082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.885166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:57.885212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:57.885250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:57.885284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:57.885312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:57.885337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:57.885399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:57.885473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:57.886305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:57.886623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:57.973756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:57.973833Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:57.986937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:57.990006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:57.990223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:58.001083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:58.001803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:58.002533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:58.002804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:58.005743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:58.005950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-02-19T08:31:58.007122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:58.007175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:58.007273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:58.007311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:58.007369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:58.007567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:58.143367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.144689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.144971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:58.145879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-02-19T08:33:06.011178Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:212:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-02-19T08:33:06.011294Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.011385Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1124: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-02-19T08:33:06.013352Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:33:06.013518Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:33:06.013588Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:33:06.013652Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-02-19T08:33:06.013757Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-02-19T08:33:06.015563Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:33:06.015653Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-02-19T08:33:06.015686Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-02-19T08:33:06.015720Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:33:06.015761Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:33:06.015857Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-02-19T08:33:06.022636Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1573 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:33:06.022696Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:33:06.022858Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1573 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-02-19T08:33:06.023059Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1573 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-02-19T08:33:06.024447Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 745 RawX2: 73014446761 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:33:06.024530Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-02-19T08:33:06.024727Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 745 RawX2: 73014446761 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:33:06.024834Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:33:06.024989Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 745 RawX2: 73014446761 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-02-19T08:33:06.025099Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, 
shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:06.025165Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.025230Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:33:06.025310Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 102:0 ProposedWaitParts -> Done 2026-02-19T08:33:06.034000Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:33:06.034193Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-02-19T08:33:06.034494Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.034731Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.035147Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.035213Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 102:0 ProgressState 2026-02-19T08:33:06.035409Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:33:06.035473Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:33:06.035545Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-02-19T08:33:06.035603Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:33:06.035662Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-02-19T08:33:06.035779Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:711:2699] message: TxId: 102 2026-02-19T08:33:06.035860Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-02-19T08:33:06.035925Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-02-19T08:33:06.035978Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 102:0 2026-02-19T08:33:06.036161Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:33:06.038411Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 102: got 
EvNotifyTxCompletionResult 2026-02-19T08:33:06.038501Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:773:2749] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-02-19T08:33:06.042223Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" Columns { Name: "value" FamilyName: "new_family" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-02-19T08:33:06.042550Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.042936Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Column families require StorageConfig specification, at schemeshard: 72057594046678944 2026-02-19T08:33:06.046828Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Column families require StorageConfig specification" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-02-19T08:33:06.047205Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column families require StorageConfig specification, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 >> TableCreation::ConcurrentUpdateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:32:28.717262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.717370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.717431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.717485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.717528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.717579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.717634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.717663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.717760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.717836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.718645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.718902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.789647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:32:28.789703Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.801034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.804145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.804289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.813020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.813761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.814415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.814671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.817229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.817386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.818342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.818393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.818503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.818548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 
1] 2026-02-19T08:32:28.818586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.818723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:28.964059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.964772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.964875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.964952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.964989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-02-19T08:32:28.965155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:28.965647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... 
X_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:33:06.261681Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-02-19T08:33:06.261735Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-02-19T08:33:06.261773Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-02-19T08:33:06.261812Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-02-19T08:33:06.261897Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-02-19T08:33:06.263500Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6809: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1017 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:33:06.263549Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:33:06.263685Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1017 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-02-19T08:33:06.263801Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1017 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-02-19T08:33:06.266118Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 120259086992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:33:06.266168Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-02-19T08:33:06.266278Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: 
TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 120259086992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:33:06.266334Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:33:06.266418Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 120259086992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-02-19T08:33:06.266482Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:06.266521Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.266564Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:33:06.266616Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 101:0 ProposedWaitParts -> Done 2026-02-19T08:33:06.269511Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:33:06.270197Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-02-19T08:33:06.270318Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.271619Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.271887Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-02-19T08:33:06.271932Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 101:0 ProgressState 2026-02-19T08:33:06.272040Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:33:06.272080Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:33:06.272125Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-02-19T08:33:06.272158Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:33:06.272213Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 
2026-02-19T08:33:06.272284Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1708: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:748:2724] message: TxId: 101 2026-02-19T08:33:06.272334Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-02-19T08:33:06.272378Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-02-19T08:33:06.272416Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 101:0 2026-02-19T08:33:06.272542Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-02-19T08:33:06.274222Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-02-19T08:33:06.274274Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:749:2725] TestWaitNotification: OK eventTxId 101 2026-02-19T08:33:06.274768Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:33:06.275012Z node 28 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 275us result status StatusSuccess 2026-02-19T08:33:06.275580Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 
0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TableCreation::SimpleUpdateTable [GOOD] >> TBackupCollectionTests::InitCopyTableSourceDroppedSurvives [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> BasicUsage::CreateTopicWithCustomMetricsLevel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2026-02-19T08:32:43.684764Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491459289396723:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:43.685276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:44.072770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:44.072842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:44.076993Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491459289396599:2081] 1771489963670885 != 1771489963670888 2026-02-19T08:32:44.094246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:44.132837Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:44.467926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:44.467949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:44.467963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:44.468023Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:44.698475Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:44.721221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:44.728017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:46.831264Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.836848Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:32:46.841742Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:32:46.841806Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.842638Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7608491472174299323:2488] Owner: [1:7608491472174299322:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.842655Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7608491472174299323:2488] Owner: [1:7608491472174299322:2487]. Creating table 2026-02-19T08:32:46.842663Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.842671Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. Creating table 2026-02-19T08:32:46.842689Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608491472174299323:2488] Owner: [1:7608491472174299322:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:46.842694Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:32:46.842937Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.842943Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Creating table 2026-02-19T08:32:46.842975Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:32:46.848555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.852257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.855113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.863478Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-02-19T08:32:46.863545Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Subscribe on create table tx: 281474976715660 2026-02-19T08:32:46.864288Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608491472174299323:2488] Owner: [1:7608491472174299322:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-02-19T08:32:46.864326Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7608491472174299323:2488] Owner: [1:7608491472174299322:2487]. Subscribe on create table tx: 281474976715658 2026-02-19T08:32:46.866015Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-02-19T08:32:46.866062Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. Subscribe on create table tx: 281474976715659 2026-02-19T08:32:46.868413Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Subscribe on tx: 281474976715660 registered 2026-02-19T08:32:46.868425Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7608491472174299323:2488] Owner: [1:7608491472174299322:2487]. Subscribe on tx: 281474976715658 registered 2026-02-19T08:32:46.868433Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. 
Subscribe on tx: 281474976715659 registered 2026-02-19T08:32:46.947940Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Request: create. Transaction completed: 281474976715660. Doublechecking... 2026-02-19T08:32:46.973349Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. Request: create. Transaction completed: 281474976715659. Doublechecking... 2026-02-19T08:32:46.978477Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7608491472174299323:2488] Owner: [1:7608491472174299322:2487]. Request: create. Transaction completed: 281474976715658. Doublechecking... 2026-02-19T08:32:47.021482Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Table already exists, number of columns: 6, has SecurityObject: true 2026-02-19T08:32:47.021542Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Column diff is empty, finishing 2026-02-19T08:32:47.030429Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:32:47.031543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:47.032545Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:32:47.032571Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7608491472174299324:2489] Owner: [1:7608491472174299322:2487]. Successful alter request: ExecComplete 2026-02-19T08:32:47.058855Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. Table already exists, number of columns: 7, has SecurityObject: true 2026-02-19T08:32:47.058903Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. Column diff is empty, finishing 2026-02-19T08:32:47.058980Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608491472174299325:2490] Owner: [1:7608491472174299322:2487]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:32:47.059938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl ... :2914], TraceId: ExecutionId: 87d4466d-e2a6a7e-e894664-c6e0af3b, RequestDatabase: /dc-1, Bootstrap. Database: /dc-1, IsSystemUser: 1, run create session 2026-02-19T08:33:05.432642Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: Request has 18444972583724.118988s seconds to be completed 2026-02-19T08:33:05.434897Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=3&id=OTJlNmQ0NDktMWFkMTk3NWQtY2JmODFiNmUtNDY4YmZmNTE=, workerId: [3:7608491554422780823:2520], database: /dc-1, longSession: 1, local sessions count: 1 2026-02-19T08:33:05.435087Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 2026-02-19T08:33:05.435460Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491554422780820:2913], ActorId: [3:7608491554422780821:2914], TraceId: ExecutionId: 87d4466d-e2a6a7e-e894664-c6e0af3b, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=3&id=OTJlNmQ0NDktMWFkMTk3NWQtY2JmODFiNmUtNDY4YmZmNTE=, TxId: , text: -- TGetScriptExecutionOperationQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT run_script_actor_id, operation_status, execution_status, finalization_status, query_text, syntax, execution_mode, result_set_metas, plan, plan_compressed, plan_compression_method, issues, transient_issues, stats, ast, ast_compressed, ast_compression_method, graph_compressed IS NOT NULL AS has_graph, retry_state, user_token FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-02-19T08:33:05.435787Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTJlNmQ0NDktMWFkMTk3NWQtY2JmODFiNmUtNDY4YmZmNTE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 40, targetId: [3:7608491554422780823:2520] 2026-02-19T08:33:05.435822Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7608491554422780825:2915] 2026-02-19T08:33:05.444202Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 40, sender: [3:7608491554422780824:2521], selfId: [3:7608491528652975599:2270], source: [3:7608491554422780823:2520] 2026-02-19T08:33:05.445067Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491554422780820:2913], ActorId: [3:7608491554422780821:2914], TraceId: ExecutionId: 87d4466d-e2a6a7e-e894664-c6e0af3b, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=OTJlNmQ0NDktMWFkMTk3NWQtY2JmODFiNmUtNDY4YmZmNTE=, TxId: 2026-02-19T08:33:05.445838Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491554422780820:2913], ActorId: [3:7608491554422780821:2914], TraceId: ExecutionId: 87d4466d-e2a6a7e-e894664-c6e0af3b, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=OTJlNmQ0NDktMWFkMTk3NWQtY2JmODFiNmUtNDY4YmZmNTE=, TxId: 2026-02-19T08:33:05.445872Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2379: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491554422780820:2913], ActorId: [3:7608491554422780821:2914], TraceId: ExecutionId: 87d4466d-e2a6a7e-e894664-c6e0af3b, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-02-19T08:33:05.446005Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491554422780819:2912], ActorId: [3:7608491554422780820:2913], TraceId: ExecutionId: 87d4466d-e2a6a7e-e894664-c6e0af3b, RequestDatabase: /dc-1, Got response [3:7608491554422780821:2914] SUCCESS 2026-02-19T08:33:05.446061Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2492: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7608491554422780818:2911] ActorId: [3:7608491554422780819:2912] Database: /dc-1 ExecutionId: 87d4466d-e2a6a7e-e894664-c6e0af3b. Extracted script execution operation [3:7608491554422780821:2914], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7608491541537878333:2665], LeaseGeneration: 0 2026-02-19T08:33:05.446086Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2518: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7608491554422780818:2911] ActorId: [3:7608491554422780819:2912] Database: /dc-1 ExecutionId: 87d4466d-e2a6a7e-e894664-c6e0af3b. 
Reply success 2026-02-19T08:33:05.446679Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=OTJlNmQ0NDktMWFkMTk3NWQtY2JmODFiNmUtNDY4YmZmNTE=, workerId: [3:7608491554422780823:2520], local sessions count: 0 2026-02-19T08:33:05.502652Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtgg0yy15s0pw2mjcz6ayfj", Request has 18444972583724.048994s seconds to be completed 2026-02-19T08:33:05.505013Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtgg0yy15s0pw2mjcz6ayfj", Created new session, sessionId: ydb://session/3?node_id=3&id=OTczZjJmNDUtNzM1MmVkOWUtNzA2NDRmMjktZjlmMTQ1YmY=, workerId: [3:7608491554422780851:2534], database: /dc-1, longSession: 1, local sessions count: 1 2026-02-19T08:33:05.505224Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtgg0yy15s0pw2mjcz6ayfj 2026-02-19T08:33:05.520735Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtgg0zg7wy6602fwrggrbwk, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTczZjJmNDUtNzM1MmVkOWUtNzA2NDRmMjktZjlmMTQ1YmY=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7608491554422780851:2534] 2026-02-19T08:33:05.520780Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7608491554422780854:2921] 2026-02-19T08:33:05.538891Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:05.544173Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtgg0zg7wy6602fwrggrbwk", Forwarded response to sender actor, requestId: 42, sender: [3:7608491554422780853:2535], selfId: [3:7608491528652975599:2270], source: [3:7608491554422780851:2534] --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:33:05.547146Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. Describe result: PathErrorUnknown 2026-02-19T08:33:05.547169Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. Creating table 2026-02-19T08:33:05.547206Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-02-19T08:33:05.554696Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:05.563644Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-02-19T08:33:05.563689Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. Subscribe on create table tx: 281474976710687 2026-02-19T08:33:05.566177Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. Subscribe on tx: 281474976710687 registered 2026-02-19T08:33:05.610882Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. Request: create. Transaction completed: 281474976710687. Doublechecking... 2026-02-19T08:33:05.665388Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:05.665434Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491554422780865:2928] Owner: [3:7608491554422780864:2927]. Column diff is empty, finishing 2026-02-19T08:33:05.669888Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491554422780991:3028] Owner: [3:7608491554422780990:3027]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:05.669940Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491554422780991:3028] Owner: [3:7608491554422780990:3027]. Column diff is empty, finishing 2026-02-19T08:33:05.705897Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtgg1599jmfftfsyfxk979j", Request has 18444972583723.845745s seconds to be completed 2026-02-19T08:33:05.708136Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtgg1599jmfftfsyfxk979j", Created new session, sessionId: ydb://session/3?node_id=3&id=ZWQ3Njk2MzQtOTUxZTI4YjYtZDlhZjFiOGUtNzYyOWY2ZDU=, workerId: [3:7608491554422780997:2544], database: /dc-1, longSession: 1, local sessions count: 2 2026-02-19T08:33:05.708324Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtgg1599jmfftfsyfxk979j 2026-02-19T08:33:05.746435Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=ZWQ3Njk2MzQtOTUxZTI4YjYtZDlhZjFiOGUtNzYyOWY2ZDU=, workerId: [3:7608491554422780997:2544], local sessions count: 1 2026-02-19T08:33:05.757959Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=OTczZjJmNDUtNzM1MmVkOWUtNzA2NDRmMjktZjlmMTQ1YmY=, workerId: [3:7608491554422780851:2534], local sessions count: 0 >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] >> KqpQueryService::TableSink_BadTransactions >> KqpQueryService::DdlTx [GOOD] >> KqpQueryService::DdlWithExplicitTransaction >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::AlterTable_SetNotNull_Invalid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2026-02-19T08:32:43.675138Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491459847724820:2076];send_to=[0:7307199536658146131:7762515]; 
2026-02-19T08:32:43.675227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:43.937939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:43.938076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:43.940896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:44.037556Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491459847724778:2081] 1771489963671082 != 1771489963671085 2026-02-19T08:32:44.129916Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:44.463800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:44.463823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:44.463892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:44.463993Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:44.686289Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:44.712211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:46.679772Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.685125Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:32:46.685174Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:32:46.685198Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.687464Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.687490Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Creating table 2026-02-19T08:32:46.687557Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:32:46.687677Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. 
Describe result: PathErrorUnknown 2026-02-19T08:32:46.687682Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. Creating table 2026-02-19T08:32:46.687706Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:46.687975Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7608491472732627502:2489] Owner: [1:7608491472732627499:2486]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.687997Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7608491472732627502:2489] Owner: [1:7608491472732627499:2486]. Creating table 2026-02-19T08:32:46.688011Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608491472732627502:2489] Owner: [1:7608491472732627499:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:32:46.692070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.694049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.696707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.703427Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-02-19T08:32:46.703482Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Subscribe on create table tx: 281474976710658 2026-02-19T08:32:46.703580Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7608491472732627502:2489] Owner: [1:7608491472732627499:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-02-19T08:32:46.703596Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7608491472732627502:2489] Owner: [1:7608491472732627499:2486]. Subscribe on create table tx: 281474976710660 2026-02-19T08:32:46.705403Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. 
SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-02-19T08:32:46.705447Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. Subscribe on create table tx: 281474976710659 2026-02-19T08:32:46.707211Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Subscribe on tx: 281474976710658 registered 2026-02-19T08:32:46.707234Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7608491472732627502:2489] Owner: [1:7608491472732627499:2486]. Subscribe on tx: 281474976710660 registered 2026-02-19T08:32:46.708040Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. Subscribe on tx: 281474976710659 registered 2026-02-19T08:32:46.796798Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-02-19T08:32:46.827590Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7608491472732627502:2489] Owner: [1:7608491472732627499:2486]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-02-19T08:32:46.832285Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-02-19T08:32:46.849397Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Table already exists, number of columns: 6, has SecurityObject: true 2026-02-19T08:32:46.849449Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Column diff is empty, finishing 2026-02-19T08:32:46.850525Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:32:46.851399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:46.852304Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:32:46.852325Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7608491472732627501:2488] Owner: [1:7608491472732627499:2486]. 
Successful alter request: ExecComplete 2026-02-19T08:32:46.879711Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. Table already exists, number of columns: 34, has SecurityObject: true 2026-02-19T08:32:46.879804Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. Column diff is empty, finishing 2026-02-19T08:32:46.879916Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: [1:7608491472732627499:2486]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:46.880919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:46.881957Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608491472732627500:2487] Owner: ... Unable to subscribe to concurrent transaction, falling back 2026-02-19T08:33:06.881729Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608491558981513434:3051] Owner: [3:7608491558981513433:3050]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715697 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:33:06.881735Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. SelfId: [3:7608491558981513434:3051] Owner: [3:7608491558981513433:3050]. Unable to subscribe to concurrent transaction, falling back 2026-02-19T08:33:06.881807Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608491558981513428:3045] Owner: [3:7608491558981513427:3044]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715696 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:33:06.881813Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. SelfId: [3:7608491558981513428:3045] Owner: [3:7608491558981513427:3044]. 
Unable to subscribe to concurrent transaction, falling back 2026-02-19T08:33:06.887219Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491558981513446:3063] txid# 281474976715689, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2026-02-19T08:33:06.887361Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491558981513450:3067] txid# 281474976715693, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2026-02-19T08:33:06.887433Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491558981513448:3065] txid# 281474976715691, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2026-02-19T08:33:06.887475Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7608491558981513416:3033] Owner: [3:7608491558981513415:3032]. Subscribe on tx: 281474976715688 registered 2026-02-19T08:33:06.887663Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608491558981513418:3035] Owner: [3:7608491558981513417:3034]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715689 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:33:06.887674Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. SelfId: [3:7608491558981513418:3035] Owner: [3:7608491558981513417:3034]. Unable to subscribe to concurrent transaction, falling back 2026-02-19T08:33:06.887769Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608491558981513426:3043] Owner: [3:7608491558981513425:3042]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715693 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:33:06.887774Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. SelfId: [3:7608491558981513426:3043] Owner: [3:7608491558981513425:3042]. Unable to subscribe to concurrent transaction, falling back 2026-02-19T08:33:06.887843Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608491558981513422:3039] Owner: [3:7608491558981513421:3038]. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715691 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:33:06.887849Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. SelfId: [3:7608491558981513422:3039] Owner: [3:7608491558981513421:3038]. Unable to subscribe to concurrent transaction, falling back 2026-02-19T08:33:06.912739Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7608491558981513416:3033] Owner: [3:7608491558981513415:3032]. Request: alter. Transaction completed: 281474976715688. Doublechecking... 2026-02-19T08:33:06.937225Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513424:3041] Owner: [3:7608491558981513423:3040]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.937268Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513424:3041] Owner: [3:7608491558981513423:3040]. Column diff is empty, finishing 2026-02-19T08:33:06.941891Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513418:3035] Owner: [3:7608491558981513417:3034]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.941937Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513418:3035] Owner: [3:7608491558981513417:3034]. Column diff is empty, finishing 2026-02-19T08:33:06.941990Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513420:3037] Owner: [3:7608491558981513419:3036]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.942023Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513420:3037] Owner: [3:7608491558981513419:3036]. Column diff is empty, finishing 2026-02-19T08:33:06.956355Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513430:3047] Owner: [3:7608491558981513429:3046]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.956395Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513430:3047] Owner: [3:7608491558981513429:3046]. Column diff is empty, finishing 2026-02-19T08:33:06.959477Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513434:3051] Owner: [3:7608491558981513433:3050]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.959514Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513434:3051] Owner: [3:7608491558981513433:3050]. Column diff is empty, finishing 2026-02-19T08:33:06.961235Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513428:3045] Owner: [3:7608491558981513427:3044]. 
Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.961271Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513428:3045] Owner: [3:7608491558981513427:3044]. Column diff is empty, finishing 2026-02-19T08:33:06.970706Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513416:3033] Owner: [3:7608491558981513415:3032]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.970750Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513416:3033] Owner: [3:7608491558981513415:3032]. Column diff is empty, finishing 2026-02-19T08:33:06.970793Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513426:3043] Owner: [3:7608491558981513425:3042]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.970802Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513426:3043] Owner: [3:7608491558981513425:3042]. Column diff is empty, finishing 2026-02-19T08:33:06.970823Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513422:3039] Owner: [3:7608491558981513421:3038]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.970833Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513422:3039] Owner: [3:7608491558981513421:3038]. Column diff is empty, finishing 2026-02-19T08:33:06.982353Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491558981513432:3049] Owner: [3:7608491558981513431:3048]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:06.982391Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491558981513432:3049] Owner: [3:7608491558981513431:3048]. 
Column diff is empty, finishing 2026-02-19T08:33:07.034878Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtgg2et625whtxy7kw89gts", Request has 18444972583722.516772s seconds to be completed 2026-02-19T08:33:07.037153Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtgg2et625whtxy7kw89gts", Created new session, sessionId: ydb://session/3?node_id=3&id=ODlkOTU4ZmEtNTllZWE0NWItM2E0YWNhZTgtOGVlMmQwNTA=, workerId: [3:7608491563276480830:2547], database: /dc-1, longSession: 1, local sessions count: 2 2026-02-19T08:33:07.037341Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtgg2et625whtxy7kw89gts 2026-02-19T08:33:07.072838Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=ODlkOTU4ZmEtNTllZWE0NWItM2E0YWNhZTgtOGVlMmQwNTA=, workerId: [3:7608491563276480830:2547], local sessions count: 1 2026-02-19T08:33:07.073813Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=MTUwZTcyZjktM2UzMTQwZjgtZGYyZDg5N2MtMjYxYjI3YjI=, workerId: [3:7608491558981513276:2534], local sessions count: 0 >> KqpDocumentApi::Scripting [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid >> KqpQueryService::ShowCreateTableOnView [GOOD] >> KqpQueryService::ShowCreateView >> KqpQueryService::ShowCreateTableDisable [GOOD] >> KqpQueryService::ShowCreateSysView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2026-02-19T08:32:45.413767Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491468960578685:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:45.413863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:45.670518Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491468960578654:2081] 1771489965411995 != 1771489965411998 2026-02-19T08:32:45.709257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:45.709371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:45.712281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:45.733900Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:45.918319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:45.918347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:45.918356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:45.918451Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:45.965506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:46.419322Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:48.098746Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:48.105690Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:32:48.105742Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:32:48.105758Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:48.111178Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7608491481845481376:2487] Owner: [1:7608491481845481374:2485]. Describe result: PathErrorUnknown 2026-02-19T08:32:48.111218Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7608491481845481376:2487] Owner: [1:7608491481845481374:2485]. Creating table 2026-02-19T08:32:48.111258Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608491481845481376:2487] Owner: [1:7608491481845481374:2485]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:32:48.111296Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. Describe result: PathErrorUnknown 2026-02-19T08:32:48.111305Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. Creating table 2026-02-19T08:32:48.111352Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:48.111386Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. Describe result: PathErrorUnknown 2026-02-19T08:32:48.111391Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. Creating table 2026-02-19T08:32:48.111413Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:32:48.115597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:48.117541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:48.120141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:48.125656Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-02-19T08:32:48.125661Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608491481845481376:2487] Owner: [1:7608491481845481374:2485]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-02-19T08:32:48.125698Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. Subscribe on create table tx: 281474976710659 2026-02-19T08:32:48.125698Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7608491481845481376:2487] Owner: [1:7608491481845481374:2485]. Subscribe on create table tx: 281474976710658 2026-02-19T08:32:48.125767Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-02-19T08:32:48.125780Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. Subscribe on create table tx: 281474976710660 2026-02-19T08:32:48.128535Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. Subscribe on tx: 281474976710659 registered 2026-02-19T08:32:48.128579Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7608491481845481376:2487] Owner: [1:7608491481845481374:2485]. Subscribe on tx: 281474976710658 registered 2026-02-19T08:32:48.128591Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. 
Subscribe on tx: 281474976710660 registered 2026-02-19T08:32:48.200469Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-02-19T08:32:48.225099Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-02-19T08:32:48.225759Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7608491481845481376:2487] Owner: [1:7608491481845481374:2485]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-02-19T08:32:48.287923Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. Table already exists, number of columns: 34, has SecurityObject: true 2026-02-19T08:32:48.288018Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. Column diff is empty, finishing 2026-02-19T08:32:48.289024Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:48.290042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:48.290299Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. Table already exists, number of columns: 7, has SecurityObject: true 2026-02-19T08:32:48.290334Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. Column diff is empty, finishing 2026-02-19T08:32:48.290386Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:32:48.290978Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:32:48.291003Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_executions updater. SelfId: [1:7608491481845481375:2486] Owner: [1:7608491481845481374:2485]. 
Successful alter request: ExecComplete 2026-02-19T08:32:48.291574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:48.292944Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7608491481845481377:2488] Owner: [1:7608491481845481374:2485]. TEvProposeTransactionStatus: { Statu ... SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-02-19T08:33:07.341764Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=YmFhMjYwZmEtMmI2MDFkNmYtNGE4YzhhYTUtN2ViNzZmMg==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 32, targetId: [3:7608491561528054821:2484] 2026-02-19T08:33:07.341796Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 32 timeout: 300.000000s actor id: [3:7608491561528054823:2877] 2026-02-19T08:33:07.360949Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 32, sender: [3:7608491561528054822:2485], selfId: [3:7608491531463282330:2215], source: [3:7608491561528054821:2484] 2026-02-19T08:33:07.361841Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491561528054818:2875], ActorId: [3:7608491561528054819:2876], TraceId: ExecutionId: 2c36e0ff-61f5a842-f90de57c-3ef5cd2f, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YmFhMjYwZmEtMmI2MDFkNmYtNGE4YzhhYTUtN2ViNzZmMg==, TxId: 2026-02-19T08:33:07.362480Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491561528054818:2875], ActorId: [3:7608491561528054819:2876], TraceId: ExecutionId: 2c36e0ff-61f5a842-f90de57c-3ef5cd2f, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YmFhMjYwZmEtMmI2MDFkNmYtNGE4YzhhYTUtN2ViNzZmMg==, TxId: 2026-02-19T08:33:07.362514Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2379: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491561528054818:2875], ActorId: [3:7608491561528054819:2876], TraceId: ExecutionId: 2c36e0ff-61f5a842-f90de57c-3ef5cd2f, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-02-19T08:33:07.362637Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7608491561528054817:2874], ActorId: [3:7608491561528054818:2875], TraceId: ExecutionId: 2c36e0ff-61f5a842-f90de57c-3ef5cd2f, RequestDatabase: /dc-1, Got response [3:7608491561528054819:2876] SUCCESS 2026-02-19T08:33:07.362703Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2492: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7608491561528054816:2873] ActorId: [3:7608491561528054817:2874] Database: /dc-1 ExecutionId: 2c36e0ff-61f5a842-f90de57c-3ef5cd2f. 
Extracted script execution operation [3:7608491561528054819:2876], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7608491548643152433:2665], LeaseGeneration: 0 2026-02-19T08:33:07.362730Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2518: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7608491561528054816:2873] ActorId: [3:7608491561528054817:2874] Database: /dc-1 ExecutionId: 2c36e0ff-61f5a842-f90de57c-3ef5cd2f. Reply success 2026-02-19T08:33:07.368051Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=YmFhMjYwZmEtMmI2MDFkNmYtNGE4YzhhYTUtN2ViNzZmMg==, workerId: [3:7608491561528054821:2484], local sessions count: 0 2026-02-19T08:33:07.408940Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtgg2tg8wj8dvsg0r4a2e5j", Request has 18444972583722.142714s seconds to be completed 2026-02-19T08:33:07.411190Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtgg2tg8wj8dvsg0r4a2e5j", Created new session, sessionId: ydb://session/3?node_id=3&id=ZTAxYzg4NDEtOWEwZTA3NjYtZDZkZjExNjYtMTQ5MjQ3ZA==, workerId: [3:7608491561528054849:2498], database: /dc-1, longSession: 1, local sessions count: 1 2026-02-19T08:33:07.411368Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtgg2tg8wj8dvsg0r4a2e5j 2026-02-19T08:33:07.428937Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtgg2v4a11bzzaj3sk23871, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZTAxYzg4NDEtOWEwZTA3NjYtZDZkZjExNjYtMTQ5MjQ3ZA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 34, targetId: [3:7608491561528054849:2498] 2026-02-19T08:33:07.428987Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 34 timeout: 600.000000s actor id: [3:7608491561528054852:2883] 2026-02-19T08:33:07.442286Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:07.448490Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtgg2v4a11bzzaj3sk23871", Forwarded response to sender actor, requestId: 34, sender: [3:7608491561528054851:2499], selfId: [3:7608491531463282330:2215], source: [3:7608491561528054849:2498] --------------------------- INIT FINISHED --------------------------- 2026-02-19T08:33:07.474371Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. Describe result: PathErrorUnknown 2026-02-19T08:33:07.474397Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. Creating table 2026-02-19T08:33:07.474438Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-02-19T08:33:07.480984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.484755Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715683 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-02-19T08:33:07.484795Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. Subscribe on create table tx: 281474976715683 2026-02-19T08:33:07.488903Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. Subscribe on tx: 281474976715683 registered 2026-02-19T08:33:07.524054Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. Request: create. Transaction completed: 281474976715683. Doublechecking... 2026-02-19T08:33:07.586021Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. Table already exists, number of columns: 3, has SecurityObject: true 2026-02-19T08:33:07.586056Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491561528054863:2890] Owner: [3:7608491561528054862:2889]. Column diff is empty, finishing 2026-02-19T08:33:07.586591Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. Table already exists, number of columns: 3, has SecurityObject: true 2026-02-19T08:33:07.586626Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:444: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2026-02-19T08:33:07.586676Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. Created ESchemeOpAlterTable transaction for path: /dc-1/test/test_table 2026-02-19T08:33:07.588532Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:33:07.593117Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715684 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:33:07.593165Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. 
Subscribe on create table tx: 281474976715684 2026-02-19T08:33:07.593412Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. Subscribe on tx: 281474976715684 registered 2026-02-19T08:33:07.607642Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. Request: alter. Transaction completed: 281474976715684. Doublechecking... 2026-02-19T08:33:07.685315Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. Table already exists, number of columns: 5, has SecurityObject: true 2026-02-19T08:33:07.685355Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7608491561528054989:2990] Owner: [3:7608491561528054988:2989]. Column diff is empty, finishing 2026-02-19T08:33:07.719970Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: TraceId: "01khtgg3476xn4wfjyvw99fdxq", Request has 18444972583721.831677s seconds to be completed 2026-02-19T08:33:07.722054Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: TraceId: "01khtgg3476xn4wfjyvw99fdxq", Created new session, sessionId: ydb://session/3?node_id=3&id=ZjgwNjQ0NjctZWU4N2I1OTItNmE2NzMwODAtN2JhZWQ1ZGE=, workerId: [3:7608491561528055021:2509], database: /dc-1, longSession: 1, local sessions count: 2 2026-02-19T08:33:07.722227Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 01khtgg3476xn4wfjyvw99fdxq 2026-02-19T08:33:07.758635Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=ZjgwNjQ0NjctZWU4N2I1OTItNmE2NzMwODAtN2JhZWQ1ZGE=, workerId: [3:7608491561528055021:2509], local sessions count: 1 2026-02-19T08:33:07.768447Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=3&id=ZTAxYzg4NDEtOWEwZTA3NjYtZDZkZjExNjYtMTQ5MjQ3ZA==, workerId: [3:7608491561528054849:2498], local sessions count: 0 >> KqpService::SessionBusy [GOOD] >> KqpService::RangeCache-UseCache >> KqpQueryService::ExecStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::InitCopyTableSourceDroppedSurvives [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:129:2153] sender: [1:131:2058] recipient: [1:113:2143] 2026-02-19T08:31:52.933051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:31:52.933148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:52.933213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:31:52.933251Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:31:52.933319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:31:52.933367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:31:52.933404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:31:52.933440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:31:52.933528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:31:52.933599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:31:52.934562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:31:52.935853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:31:53.018405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7914: Cannot subscribe to console configs 2026-02-19T08:31:53.018482Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:31:53.033500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:31:53.034239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:31:53.035143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:31:53.048215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:31:53.048988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:31:53.050805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:31:53.053063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:31:53.063981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:31:53.065074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:31:53.074947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:31:53.075057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-02-19T08:31:53.075213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:31:53.075282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:31:53.075407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:31:53.075664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:31:53.223567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225428Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:31:53.225829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 28147 ... pl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-02-19T08:33:08.107595Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__init.cpp:2228: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 39], TabletType: DataShard, at schemeshard: 72057594046678944 2026-02-19T08:33:08.107627Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-02-19T08:33:08.107838Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2304: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2026-02-19T08:33:08.108191Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2370: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.108526Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2521: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2026-02-19T08:33:08.109111Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2900: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.109326Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2979: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.109998Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3480: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.110154Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3516: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.110391Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init.cpp:3735: Adjusted PathState, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], name: Table1Copy, state: EPathStateCreate, txId: 102, TxType: TxCopyTable, LastTxId: 102 2026-02-19T08:33:08.110854Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3763: TTxInit for TxShards, read records: 2, at schemeshard: 72057594046678944 2026-02-19T08:33:08.111134Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3910: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.111258Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3927: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.111438Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3944: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.111766Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4105: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.111942Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4121: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.112693Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4429: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.113182Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:4784: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.113329Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4850: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.113447Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4909: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.118062Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4998: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.118238Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5025: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.118366Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5052: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.118820Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5346: TTxInit for TxInFlight execute ProgressState for all parts , txId: 102, parts: 1, await num: 0, dependent num: 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.118929Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 102:0 ProgressState at tablet# 72057594046678944 2026-02-19T08:33:08.119067Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 102:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 102:0 seqNo# 3:1 at tablet# 72057594046678944 2026-02-19T08:33:08.120050Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-02-19T08:33:08.161152Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:33:08.161294Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-02-19T08:33:08.161549Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-02-19T08:33:08.161733Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72075186233409547 2026-02-19T08:33:08.161819Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72075186233409546 2026-02-19T08:33:08.162061Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:33:08.172209Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 2146435083, Sender [25:855:2809], Recipient [25:855:2809]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-02-19T08:33:08.172294Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5362: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-02-19T08:33:08.175621Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, 
at schemeshard: 72057594046678944 2026-02-19T08:33:08.175758Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:33:08.176620Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:33:08.176747Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:33:08.176838Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:33:08.176924Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-02-19T08:33:08.177151Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 274399233, Sender [25:890:2809], Recipient [25:855:2809]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:33:08.177207Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5476: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-02-19T08:33:08.177288Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:33:08.178763Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877760, Sender [25:905:2847], Recipient [25:855:2809]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72075186233409546 Status: OK ServerId: [25:911:2853] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-02-19T08:33:08.178832Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5394: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-02-19T08:33:08.178915Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6201: Handle TEvClientConnected, tabletId: 72075186233409546, status: OK, at schemeshard: 72057594046678944 2026-02-19T08:33:08.216257Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 269877760, Sender [25:904:2846], Recipient [25:855:2809]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72075186233409547 Status: OK ServerId: [25:910:2852] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-02-19T08:33:08.219954Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5394: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-02-19T08:33:08.220116Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6201: Handle TEvClientConnected, tabletId: 72075186233409547, status: OK, at schemeshard: 72057594046678944 ... blocking NKikimr::TEvDataShard::TEvProposeTransactionResult from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 1 ... 
blocking NKikimr::TEvDataShard::TEvProposeTransactionResult from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 2 Leader for TabletID 72057594046678944 is [25:855:2809] sender: [25:922:2058] recipient: [25:15:2062] 2026-02-19T08:33:08.284907Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5298: StateWork, received event# 271122945, Sender [25:921:2861], Recipient [25:855:2809]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-02-19T08:33:08.285040Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-02-19T08:33:08.285281Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:33:08.285697Z node 25 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 433us result status StatusPathDoesNotExist 2026-02-19T08:33:08.286079Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 1000000, drop txId: 0" Path: "/MyRoot/Table1" PathId: 38 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryService::TableSink_Olap_Replace >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoWithConsumersRetentionShrinked [GOOD] >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::ReturnAndCloseSameTime >> KqpQueryService::ExecuteQueryPure |95.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::ReplaceIntoWithDefaultValue >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged+UseStreamIndex >> KqpQueryService::DdlPermission [GOOD] >> KqpQueryService::DdlSecret >> KqpQueryService::CreateAndDropTopic [GOOD] >> KqpQueryService::CreateOrDropTopicOverTable >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> ScriptExecutionsTest::TestSecureScriptExecutions |95.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueAttributesFifoWithConsumersRetentionShrinked [GOOD] Test command err: 2026-02-19T08:32:21.538189Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491365984376533:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:21.541530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:21.798309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:21.841485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:21.841648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:21.897860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:21.915961Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:21.917216Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491365984376490:2081] 1771489941529961 != 1771489941529964 2026-02-19T08:32:22.076226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:22.117655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:22.117689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:22.117698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:22.117795Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:22.500407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:22.545852Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:22.672016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:32:22.677196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:22.748967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.874234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.925432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.965585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.997879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.026671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.055598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.084209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.119580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.151868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:24.583683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491378869280033:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.583694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491378869280041:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.583807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.584155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491378869280048:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.584348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.587713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:24.598541Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491378869280047:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:32:24.669094Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491378869280100:3098] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:25.222047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.250546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.276050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.299482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.325865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.352030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... request [GetQueueAttributes] requestId [b70eb7d6-51d34e2c-ca246c97-66f4935c] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:33:10.616017Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [GetQueueAttributes] requestId [b70eb7d6-51d34e2c-ca246c97-66f4935c] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue Http output full {"__type":"InvalidParameterValue","message":"Invalid attribute name \"UnknownAttribute\""} 2026-02-19T08:33:10.616118Z node 7 :SQS INFO: http_req.cpp:1237: Request [b70eb7d6-51d34e2c-ca246c97-66f4935c] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [38cf:a595:d87b:0:20cf:a595:d87b:0] Duration [1ms] Code [400] Response [Invalid attribute name "UnknownAttribute"] 2026-02-19T08:33:10.616135Z node 7 :HTTP_PROXY INFO: http_req.cpp:1671: http request [GetQueueAttributes] requestId [b70eb7d6-51d34e2c-ca246c97-66f4935c] reply with status: STATUS_UNDEFINED message: Invalid attribute name "UnknownAttribute" 2026-02-19T08:33:10.616344Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:45738) <- (400 InvalidParameterValue, 90 bytes) 2026-02-19T08:33:10.616394Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:45738) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.GetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "AttributeNames": [ "UnknownAttribute" ], "QueueUrl":"/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-0" } 2026-02-19T08:33:10.616423Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:45738) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: b70eb7d6-51d34e2c-ca246c97-66f4935c Content-Type: application/x-amz-json-1.1 Content-Length: 90 2026-02-19T08:33:10.616492Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:45738) connection closed 2026-02-19T08:33:10.617081Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:45742) incoming connection opened 2026-02-19T08:33:10.617155Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:45742) -> (POST /Root, 143 bytes) 2026-02-19T08:33:10.617287Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [98c9:6395:d87b:0:80c9:6395:d87b:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: 2cbfb621-a102b45-5f26a1be-916cabb9 2026-02-19T08:33:10.617562Z node 7 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [2cbfb621-a102b45-5f26a1be-916cabb9] got new request from [98c9:6395:d87b:0:80c9:6395:d87b:0] database '/Root' stream '' 2026-02-19T08:33:10.618025Z node 7 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [2cbfb621-a102b45-5f26a1be-916cabb9] [auth] Authorized successfully 2026-02-19T08:33:10.618068Z node 7 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [2cbfb621-a102b45-5f26a1be-916cabb9] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:33:10.619367Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [GetQueueAttributes] requestId [2cbfb621-a102b45-5f26a1be-916cabb9] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue Http output full {"__type":"InvalidParameterValue","message":"Invalid attribute name \"UnknownAttribute\""} 2026-02-19T08:33:10.619472Z node 7 :SQS INFO: http_req.cpp:1237: Request [2cbfb621-a102b45-5f26a1be-916cabb9] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [98c9:6395:d87b:0:80c9:6395:d87b:0] Duration [4ms] Code [400] Response [Invalid attribute name "UnknownAttribute"] 2026-02-19T08:33:10.619489Z node 7 :HTTP_PROXY INFO: http_req.cpp:1671: http request [GetQueueAttributes] requestId [2cbfb621-a102b45-5f26a1be-916cabb9] reply with status: STATUS_UNDEFINED message: Invalid attribute name "UnknownAttribute" 2026-02-19T08:33:10.619701Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:45742) <- (400 InvalidParameterValue, 90 bytes) 2026-02-19T08:33:10.619752Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#37,[::1]:45742) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.GetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "AttributeNames": [ "All", "UnknownAttribute" ], "QueueUrl":"/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-0" } 2026-02-19T08:33:10.619781Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#37,[::1]:45742) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 2cbfb621-a102b45-5f26a1be-916cabb9 Content-Type: application/x-amz-json-1.1 Content-Length: 90 2026-02-19T08:33:10.619844Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:45742) connection closed 2026-02-19T08:33:10.620641Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#40,[::1]:45756) incoming connection opened 2026-02-19T08:33:10.620701Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#40,[::1]:45756) -> (POST /Root, 152 bytes) 2026-02-19T08:33:10.620832Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b809:7794:d87b:0:a009:7794:d87b:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: ff2f866-a40faaab-f4ae4c17-59f147c8 2026-02-19T08:33:10.621173Z node 7 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [ff2f866-a40faaab-f4ae4c17-59f147c8] got new request from [b809:7794:d87b:0:a009:7794:d87b:0] database '/Root' stream '' 2026-02-19T08:33:10.621721Z node 7 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [ff2f866-a40faaab-f4ae4c17-59f147c8] [auth] Authorized successfully 2026-02-19T08:33:10.621768Z node 7 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [ff2f866-a40faaab-f4ae4c17-59f147c8] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:33:10.622598Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1315: http request [GetQueueAttributes] requestId [ff2f866-a40faaab-f4ae4c17-59f147c8] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-02-19T08:33:10.622699Z node 7 :SQS INFO: http_req.cpp:1237: Request [ff2f866-a40faaab-f4ae4c17-59f147c8] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [b809:7794:d87b:0:a009:7794:d87b:0] Duration [2ms] Code [400] Response [Invalid attribute name "UnknownAttribute"] 2026-02-19T08:33:10.622716Z node 7 :HTTP_PROXY INFO: http_req.cpp:1671: http request [GetQueueAttributes] requestId [ff2f866-a40faaab-f4ae4c17-59f147c8] reply with status: STATUS_UNDEFINED message: Invalid attribute name "UnknownAttribute" 2026-02-19T08:33:10.622916Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#40,[::1]:45756) <- (400 InvalidParameterValue, 90 bytes) 2026-02-19T08:33:10.622968Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:340: (#40,[::1]:45756) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.GetQueueAttributes X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "AttributeNames": [ "DelaySeconds", "UnknownAttribute" ], "QueueUrl":"/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-0" } 2026-02-19T08:33:10.622996Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:347: (#40,[::1]:45756) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: ff2f866-a40faaab-f4ae4c17-59f147c8 Content-Type: application/x-amz-json-1.1 Content-Length: 90 2026-02-19T08:33:10.623063Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#40,[::1]:45756) connection closed Http output full {"__type":"InvalidParameterValue","message":"Invalid attribute name \"UnknownAttribute\""} 2026-02-19T08:33:10.624173Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:105: (#37,[::1]:45768) incoming connection opened 2026-02-19T08:33:10.624238Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:197: (#37,[::1]:45768) -> (POST /Root, 501 bytes) 2026-02-19T08:33:10.624370Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b87c:8795:d87b:0:a07c:8795:d87b:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: cfbeef6c-9d7f621e-2392567-7081e0f 2026-02-19T08:33:10.624799Z node 7 :HTTP_PROXY INFO: http_req.cpp:1385: http request [GetQueueAttributes] requestId [cfbeef6c-9d7f621e-2392567-7081e0f] got new request from [b87c:8795:d87b:0:a07c:8795:d87b:0] database '/Root' stream '' 2026-02-19T08:33:10.625124Z node 7 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [GetQueueAttributes] requestId [cfbeef6c-9d7f621e-2392567-7081e0f] [auth] Authorized successfully 2026-02-19T08:33:10.625150Z node 7 :HTTP_PROXY INFO: http_req.cpp:1117: http request [GetQueueAttributes] requestId [cfbeef6c-9d7f621e-2392567-7081e0f] sending grpc request to '' database: '/Root' iam token size: 0 2026-02-19T08:33:10.626260Z node 7 :SQS INFO: http_req.cpp:1237: Request [cfbeef6c-9d7f621e-2392567-7081e0f] Request done. 
Api [SqsTopic] Action [GetQueueAttributes] User [AKIDEXAMPLE] Database [/Root] Queue [ExampleQueueName] IP [b87c:8795:d87b:0:a07c:8795:d87b:0] Duration [2ms] Code [200] 2026-02-19T08:33:10.626284Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [GetQueueAttributes] requestId [cfbeef6c-9d7f621e-2392567-7081e0f] reply ok 2026-02-19T08:33:10.626417Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#37,[::1]:45768) <- (200 , 479 bytes) 2026-02-19T08:33:10.626501Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#37,[::1]:45768) connection closed Http output full {"Attributes":{"FifoQueue":"true","MaximumMessageSize":"262144","DelaySeconds":"0","CreatedTimestamp":"1771489990","VisibilityTimeout":"25","RedrivePolicy":"null","QueueArn":"yrn:yc:ymq:ru-central1::/v1/5//Root/16/ExampleQueueName/18/ydb-sqs-consumer-0","ApproximateNumberOfMessagesDelayed":"0","ApproximateNumberOfMessages":"0","MessageRetentionPeriod":"36000","ContentBasedDeduplication":"false","ReceiveMessageWaitTimeSeconds":"0","ApproximateNumberOfMessagesNotVisible":"0"}} 2026-02-19T08:33:10.633548Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:10.633580Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:10.633594Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:10.633614Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:10.633626Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:10.734196Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:10.734224Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:10.734238Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:10.734256Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:10.734268Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist >> KqpQueryServiceScripts::TestPaging >> KqpDocumentApi::RestrictWrite [GOOD] >> KqpDocumentApi::AllowRead >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] >> KqpQueryService::AlterTable_SetNotNull_Invalid [GOOD] >> KqpQueryService::AlterTable_SetNotNull_Valid >> KqpService::SessionBusyRetryOperation [GOOD] >> KqpService::SessionBusyRetryOperationSync >> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD] >> KqpQueryService::AlterCdcTopic >> KqpQueryService::TempTablesDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] Test command err: 2026-02-19T08:32:56.135974Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491515887723800:2257];send_to=[0:7307199536658146131:7762515]; 
2026-02-19T08:32:56.136033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:56.186576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:56.595005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:56.595171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.620625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.622240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:56.702047Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.702386Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491515887723572:2081] 1771489976093094 != 1771489976093097 2026-02-19T08:32:56.969818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:57.002422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:57.002454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:57.002463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:57.002551Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:57.134280Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.517648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.526906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:59.621528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528772626362:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.621657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.622019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528772626372:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.622064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.884622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:00.054960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491533067593761:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.055080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.055249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491533067593766:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.055311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491533067593768:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.055368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.059236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:00.069875Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491533067593770:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:33:00.155577Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491533067593821:2594] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.790677Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491533067593909:2375], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2026-02-19T08:33:00.792411Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MTE3NTNmMjEtYmY5YzgwZGMtOTZjNzVmNjYtNDM5ZDJjNDk=, ActorId: [1:7608491533067593907:2374], ActorState: ExecuteState, LegacyTraceId: 01khtgfwas4axt1aayvbgccrjg, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 29 } message: "At function: KiWriteTable!" end_position { row: 2 column: 29 } severity: 1 issues { position { row: 2 column: 29 } message: "Missing key column in input: Col1 for table: /Root/DataShard" end_position { row: 2 column: 29 } issue_code: 2029 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:01.745974Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491533855452638:2135];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:01.746018Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:01.798436Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:01.901597Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491533855452543:2081] 1771489981737084 != 1771489981737087 2026-02-19T08:33:01.912290Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:01.912348Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:01.918380Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:01.948937Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:33:01.948969Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:579: SchemeBoardDelete /Root Strong=0 2026-02-19T08:33:01.953674Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:02.000078Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:02.011745Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:02.011768Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:02.011775Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:02.011874Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:02.395945Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:33:02.429918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:02.434661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:02.758529Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected eve ... NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:05.296388Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491551035322629:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:05.296434Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:05.337753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:05.410017Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491551035322723:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:05.410110Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:05.410451Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491551035322728:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:05.410498Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491551035322729:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:05.410597Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:05.414353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:05.430518Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491551035322732:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:33:05.513366Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491551035322783:2593] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:07.518274Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491560905259241:2205];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:07.518413Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:07.529185Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:07.645934Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:07.646695Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:07.646768Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:07.665847Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491560905259066:2081] 1771489987506230 != 1771489987506233 2026-02-19T08:33:07.669929Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:07.681273Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:07.822662Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:07.822684Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:07.822691Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:07.822776Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:07.884085Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:08.330961Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:08.348630Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:08.521846Z node 3 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:11.014823Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491578085129137:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.014912Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.015306Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491578085129147:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.015350Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.050938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.306438Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.573785Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491578085130468:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.573881Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.574326Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491578085130473:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.574451Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491578085130474:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.574525Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.578834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:11.590710Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491578085130477:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-02-19T08:33:11.655182Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491578085130528:3393] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:12.517795Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491560905259241:2205];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:12.517863Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2026-02-19T08:32:43.694219Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491459199692918:2230];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:43.694413Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:43.733830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:44.102576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:44.102688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:44.167694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-02-19T08:32:44.171333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:44.219370Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:44.221213Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491459199692707:2081] 1771489963670936 != 1771489963670939 2026-02-19T08:32:44.454469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:32:44.510670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:44.523148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-02-19T08:32:44.697951Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:46.848005Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.868241Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=1&id=NDdjMWMyNjItNDY3MGQyOTUtOTU5MGM0ZGQtMjRjY2QwMDc=, workerId: [1:7608491472084595448:2308], database: , longSession: 0, local sessions count: 1 2026-02-19T08:32:46.868650Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=1&id=NDdjMWMyNjItNDY3MGQyOTUtOTU5MGM0ZGQtMjRjY2QwMDc=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7608491472084595448:2308] 2026-02-19T08:32:46.868681Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2026-02-19T08:32:46.871850Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:32:46.871910Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:32:46.871933Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.873851Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=1&id=NDdjMWMyNjItNDY3MGQyOTUtOTU5MGM0ZGQtMjRjY2QwMDc=, ActorId: [1:7608491472084595448:2308], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxy_request_id# 2 trace_id# 2026-02-19T08:32:46.874054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491472084595449:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:46.874085Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 2, sender: [1:7608491463494660798:2482], selfId: [1:7608491459199692973:2270], source: [1:7608491472084595448:2308] 2026-02-19T08:32:46.874153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:46.874505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491472084595458:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:46.874566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:46.876911Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1218: Handle TEvPrivate::TEvOnRequestTimeout(2) 2026-02-19T08:32:46.876932Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1221: Invalid request info while on request timeout handle. RequestId: 2 ... waiting for SysViewsRoster update finished 2026-02-19T08:32:53.375182Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:53.375623Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:690:2352], Scheduled retry for error: {
: Error: Scheme service not found } 2026-02-19T08:32:53.376466Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:53.385411Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:32:53.386685Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:32:53.388433Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2411], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:32:53.388814Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:32:53.389039Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:32:53.390708Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:32:53.390760Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:32:53.681907Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:53.786605Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:53.786734Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:53.787553Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:53.787654Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:53.832957Z node 2 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-02-19T08:32:53.833657Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:53.834090Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:53.920421Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-02-19T08:32:53.933666Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1503:2986] 2026-02-19T08:32:54.735901Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=3&id=Mjg1ZjNiMGYtYmYyMDE5NTUtZjc2ODZhOTctMWJjNjhkMTU=, workerId: [3:1504:2380], database: , longSession: 1, local sessions count: 1 2026-02-19T08:32:54.736189Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=Mjg1ZjNiMGYtYmYyMDE5NTUtZjc2ODZhOTctMWJjNjhkMTU= 2026-02-19T08:32:54.736897Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=3&id=Mjg1ZjNiMGYtYmYyMDE5NTUtZjc2ODZhOTctMWJjNjhkMTU=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2026-02-19T08:32:54.736966Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2026-02-19T08:32:54.737330Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=3&id=Mjg1ZjNiMGYtYmYyMDE5NTUtZjc2ODZhOTctMWJjNjhkMTU=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1504:2380] 2026-02-19T08:32:54.737372Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2026-02-19T08:32:55.024005Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1505:2987], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: E ... 2026-02-19T08:33:12.332632Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=MjBlYWEwODMtYWIzMmJmYTYtMjUwNzY3MjAtZDg3Y2Q4Yzk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 48, targetId: [7:7608491581127880363:2571] 2026-02-19T08:33:12.332663Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 48 timeout: 300.000000s actor id: [7:7608491581127880382:3016] 2026-02-19T08:33:12.344703Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 48, sender: [7:7608491581127880381:2578], selfId: [7:7608491542473172959:2164], source: [7:7608491581127880363:2571] 2026-02-19T08:33:12.345591Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7608491581127880359:2568], ActorId: [7:7608491581127880361:2569], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=7&id=MjBlYWEwODMtYWIzMmJmYTYtMjUwNzY3MjAtZDg3Y2Q4Yzk=, TxId: 2026-02-19T08:33:12.345698Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7608491581127880359:2568], ActorId: [7:7608491581127880361:2569], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=MjBlYWEwODMtYWIzMmJmYTYtMjUwNzY3MjAtZDg3Y2Q4Yzk=, TxId: 2026-02-19T08:33:12.345788Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4347: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7608491581127880359:2568], ActorId: [7:7608491581127880361:2569], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2026-02-19T08:33:12.346002Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [7:7608491581127880358:2567], ActorId: [7:7608491581127880359:2568], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7608491581127880361:2569] SUCCESS 2026-02-19T08:33:12.346216Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=7&id=MjBlYWEwODMtYWIzMmJmYTYtMjUwNzY3MjAtZDg3Y2Q4Yzk=, workerId: [7:7608491581127880363:2571], local sessions count: 1 2026-02-19T08:33:12.346653Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1476: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7608491576832913020:2993] ActorId: [7:7608491576832913021:2994] Database: /dc-1 ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486. Successfully finalized script execution operation, WaitingRetry: 0 2026-02-19T08:33:12.346706Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1823: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7608491576832913020:2993] ActorId: [7:7608491576832913021:2994] Database: /dc-1 ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486. Reply success 2026-02-19T08:33:12.356427Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtgg7n3d9kxy27dva98sxbh, Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=ZWUwNTQxNjAtODA1ZTE2ZmUtNWZmYjUyNjktNWFjYjA1Y2E=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 49, targetId: [7:7608491568242978299:2525] 2026-02-19T08:33:12.356479Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 49 timeout: 300.000000s actor id: [7:7608491581127880405:3023] 2026-02-19T08:33:13.087822Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtgg7n3d9kxy27dva98sxbh", Forwarded response to sender actor, requestId: 49, sender: [7:7608491581127880404:2583], selfId: [7:7608491542473172959:2164], source: [7:7608491568242978299:2525] 2026-02-19T08:33:13.094224Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:851: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7608491585422847740:3040] ActorId: [7:7608491585422847741:3041] Database: /dc-1 ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486. Bootstrap. Start TLeaseUpdateRetryActor [7:7608491585422847742:3042] 2026-02-19T08:33:13.094258Z node 7 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847741:3041], ActorId: [7:7608491585422847742:3042], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, Starting query actor #1 [7:7608491585422847743:3043] 2026-02-19T08:33:13.094294Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847742:3042], ActorId: [7:7608491585422847743:3043], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, Bootstrap. 
Database: /dc-1, IsSystemUser: 1, run create session 2026-02-19T08:33:13.094427Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1372: Request has 18444972583716.457199s seconds to be completed 2026-02-19T08:33:13.095844Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1447: Created new session, sessionId: ydb://session/3?node_id=7&id=N2UwZWU3ZDEtMzI5MTcyZjAtNjE0MGMzYzUtMWJlMTBjNDA=, workerId: [7:7608491585422847745:2598], database: /dc-1, longSession: 1, local sessions count: 2 2026-02-19T08:33:13.095963Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:623: Received create session request, trace_id: 2026-02-19T08:33:13.097812Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:713: [ScriptExecutions] [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847742:3042], ActorId: [7:7608491585422847743:3043], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, Update lease on duration: 1.000000s 2026-02-19T08:33:13.097970Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847742:3042], ActorId: [7:7608491585422847743:3043], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RunDataQuery with SessionId: ydb://session/3?node_id=7&id=N2UwZWU3ZDEtMzI5MTcyZjAtNjE0MGMzYzUtMWJlMTBjNDA=, TxId: , text: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-02-19T08:33:13.098252Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=N2UwZWU3ZDEtMzI5MTcyZjAtNjE0MGMzYzUtMWJlMTBjNDA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 51, targetId: [7:7608491585422847745:2598] 2026-02-19T08:33:13.098280Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 51 timeout: 300.000000s actor id: [7:7608491585422847747:3044] 2026-02-19T08:33:13.338648Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 51, sender: [7:7608491585422847746:2599], selfId: [7:7608491542473172959:2164], source: [7:7608491585422847745:2598] 2026-02-19T08:33:13.339139Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847742:3042], ActorId: [7:7608491585422847743:3043], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=N2UwZWU3ZDEtMzI5MTcyZjAtNjE0MGMzYzUtMWJlMTBjNDA=, TxId: 01khtgg8kk0328s1ghee871rtj 2026-02-19T08:33:13.339257Z node 7 :KQP_PROXY WARN: query_actor.cpp:376: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847742:3042], ActorId: [7:7608491585422847743:3043], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=7&id=N2UwZWU3ZDEtMzI5MTcyZjAtNjE0MGMzYzUtMWJlMTBjNDA=, TxId: 01khtgg8kk0328s1ghee871rtj 2026-02-19T08:33:13.339311Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:432: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847742:3042], ActorId: [7:7608491585422847743:3043], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Rollback transaction: 01khtgg8kk0328s1ghee871rtj in session: ydb://session/3?node_id=7&id=N2UwZWU3ZDEtMzI5MTcyZjAtNjE0MGMzYzUtMWJlMTBjNDA= 2026-02-19T08:33:13.339479Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847741:3041], ActorId: [7:7608491585422847742:3042], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7608491585422847743:3043] NOT_FOUND 2026-02-19T08:33:13.339571Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:861: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7608491585422847740:3040] ActorId: [7:7608491585422847741:3041] Database: /dc-1 ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486. Lease update [7:7608491585422847743:3043] finished NOT_FOUND, issues: {
: Error: No such execution } 2026-02-19T08:33:13.341960Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=N2UwZWU3ZDEtMzI5MTcyZjAtNjE0MGMzYzUtMWJlMTBjNDA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 52, targetId: [7:7608491585422847745:2598] 2026-02-19T08:33:13.342010Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 52 timeout: 600.000000s actor id: [7:7608491585422847768:3053] 2026-02-19T08:33:13.348225Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 52, sender: [7:7608491585422847767:2606], selfId: [7:7608491542473172959:2164], source: [7:7608491585422847745:2598] 2026-02-19T08:33:13.348466Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:441: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7608491585422847742:3042], ActorId: [7:7608491585422847743:3043], TraceId: ExecutionId: cfb591f7-e63a0728-e409f0ec-bec8e486, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2026-02-19T08:33:13.348834Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=7&id=N2UwZWU3ZDEtMzI5MTcyZjAtNjE0MGMzYzUtMWJlMTBjNDA=, workerId: [7:7608491585422847745:2598], local sessions count: 1 2026-02-19T08:33:13.362055Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=7&id=ZWUwNTQxNjAtODA1ZTE2ZmUtNWZmYjUyNjktNWFjYjA1Y2E=, workerId: [7:7608491568242978299:2525], local sessions count: 0 >> KqpQueryService::DdlWithExplicitTransaction [GOOD] >> KqpQueryService::Ddl_Dml >> KqpQueryService::ShowCreateView [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull >> KqpQueryService::TableSink_BadTransactions [GOOD] >> KqpQueryService::ClosedSessionRemovedFromPool [GOOD] >> KqpQueryService::CloseConnection >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] >> KqpQueryService::ReadManyRanges >> KqpQueryService::ShowCreateSysView [GOOD] >> KqpQueryService::SeveralCTAS-UseSink >> KqpQueryService::TableSink_Olap_Replace [GOOD] >> KqpQueryService::TableSink_OlapUpsert >> KqpQueryService::ExecStats [GOOD] >> KqpQueryService::ExecStatsPlan >> KqpQueryServiceScripts::ExecuteScriptStatsProfile >> KqpQueryService::Ddl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateView [GOOD] Test command err: 2026-02-19T08:32:55.586143Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491509486976008:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.586223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:55.936194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:55.985309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:55.985415Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.037278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.042590Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.050693Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491509486975965:2081] 1771489975562086 != 1771489975562089 2026-02-19T08:32:56.108329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.322485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.322514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.322523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.322612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.607792Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.004379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.014931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:57.080177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.242072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.393632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.473138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:58.955907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491522371879742:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:58.956029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:58.956377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491522371879751:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:58.956421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.626407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.652448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.683610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.714884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.744297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.775831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.828986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.874944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.959366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526666847919:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.959423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.959642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526666847924:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.959721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526666847925:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.959778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.962896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:59.972993Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491526666847928:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:00.060373Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491530961815275:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.586500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491509486976008:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.586550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:01.593314Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491535256782872:2542], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030 ... 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=2&id=Mzc4OTNkMDQtMzU2YmMzZmItMjA1NDVkZDMtMzIwNDY2OQ==, ActorId: [2:7608491565927876422:2538], ActorState: ExecuteState, LegacyTraceId: 01khtgg4bn22w6rsqjpcd5d7ee, Create QueryResponse for error on request, msg: status# BAD_REQUEST issues# { message: "Path type mismatch, expected: Table, found: View" severity: 1 } trace_id# 2026-02-19T08:33:10.032794Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491574523316095:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:10.032877Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:10.048313Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:10.153937Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:10.154014Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:10.156113Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:10.181792Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:10.336582Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:10.357200Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:10.357220Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:10.357226Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:10.357304Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:10.867891Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:10.874334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:10.893241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:10.987093Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.108781Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:11.168685Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.240668Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.764135Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491587408219792:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:13.764228Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:13.764474Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491587408219801:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:13.764523Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:13.851984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.901436Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.942113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.983545Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.019698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.073468Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.127082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.173948Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.273088Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491591703187972:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.273154Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.273315Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491591703187977:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.273349Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491591703187978:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.273379Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.276947Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:14.294789Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491591703187981:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:14.376577Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491591703188032:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:15.033131Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491574523316095:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:15.033196Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_BadTransactions [GOOD] Test command err: 2026-02-19T08:32:55.572128Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491509592914410:2137];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.572166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:55.924063Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:55.966670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:55.966760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.025948Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491509592914313:2081] 1771489975568434 != 1771489975568437 2026-02-19T08:32:56.027941Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.035562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.164746Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.322400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.322437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.322444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.322523Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.585272Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.043439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.058131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:57.126276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.290942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.456695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.530611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.125512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526772785373:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.125657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.126077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526772785383:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.126147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.626570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.657816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.685973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.712529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.740808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.773100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.809211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.848990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.952129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526772786254:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.952195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.952837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526772786259:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.952878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526772786260:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.953004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.959133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:59.973560Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491526772786263:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:00.075566Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491531067753610:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.573793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491509592914410:2137];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.573878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:02.698766Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491539089776365:2091];send_to=[0:7307199536658146131:7762515 ... esult=not_found; 2026-02-19T08:33:14.156931Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.156994Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.157010Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.157311Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.157368Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.157385Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.166353Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.166410Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.166427Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.169621Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.169662Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.169677Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.175562Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.175619Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.175633Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.177156Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.177194Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.177207Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.184462Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.184519Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.184536Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.184706Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.184742Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.184755Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:14.199699Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.278070Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491589975830815:2529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.278170Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.279870Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491589975830820:2532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.279950Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491589975830821:2533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.280081Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.284235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:14.299878Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491589975830824:2534], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-02-19T08:33:14.393459Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491589975830875:2858] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:14.971988Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491568500992888:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:14.972058Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:15.055706Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=ZDIyOTljYjYtMzcyNTkxMmEtMzA0NTRmMS0yNjVmZDk0Mg==, ActorId: [3:7608491589975830996:2556], ActorState: ExecuteState, LegacyTraceId: 01khtgga1w1qpffaednvrrayne, Create QueryResponse for error on request, msg: Write transactions that use both row-oriented and column-oriented tables are disabled at current time. status# PRECONDITION_FAILED issues# trace_id# 2026-02-19T08:33:15.172471Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=OTgzYzk0ZmYtNDUxZDcxNWMtY2Y1NTY5NmMtNGRjM2UwOTY=, ActorId: [3:7608491594270798311:2563], ActorState: ExecuteState, LegacyTraceId: 01khtgga9t3vrdz5bsbtqby3sk, Create QueryResponse for error on request, msg: Write transactions that use both row-oriented and column-oriented tables are disabled at current time. status# PRECONDITION_FAILED issues# trace_id# 2026-02-19T08:33:15.350109Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=NzhhNGU5NDItYTkyODQzZDctMTVkYTEwODAtNmE3ZGRhODU=, ActorId: [3:7608491594270798334:2573], ActorState: ExecuteState, LegacyTraceId: 01khtggade5gxq5315xc2jc3eb, Create QueryResponse for error on request, msg: Write transactions that use both row-oriented and column-oriented tables are disabled at current time. status# PRECONDITION_FAILED issues# trace_id# 2026-02-19T08:33:15.650162Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=YzQ3MDYyYzEtODkyMGY4NTUtNDk5NjU0OC1iM2IwMDZlNA==, ActorId: [3:7608491594270798352:2580], ActorState: ExecuteState, LegacyTraceId: 01khtggak55nwprmh64jtsgfwr, Create QueryResponse for error on request, msg: Write transactions that use both row-oriented and column-oriented tables are disabled at current time. status# PRECONDITION_FAILED issues# trace_id# 2026-02-19T08:33:15.830180Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=NDAzMjhkMjItYjdhMDVkNTEtM2Y4MGMyZTgtNGViODg4NTc=, ActorId: [3:7608491594270798371:2588], ActorState: ExecuteState, LegacyTraceId: 01khtggawhbpdfqncfcxht731x, Create QueryResponse for error on request, msg: Write transactions that use both row-oriented and column-oriented tables are disabled at current time. 
status# PRECONDITION_FAILED issues# trace_id# >> KqpQueryService::ExecuteQueryPure [GOOD] >> KqpQueryService::ExecuteQueryPg >> KqpQueryService::AlterTable_SetNotNull_Valid [GOOD] >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid >> KqpService::CloseSessionAbortQueryExecution [GOOD] >> KqpService::PatternCache >> KqpQueryService::TempTablesDrop [GOOD] >> KqpQueryService::Tcl >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] >> KqpQueryService::IssuesInCaseOfSuccess >> KqpDocumentApi::AllowRead [GOOD] >> KqpDocumentApi::RestrictAlter >> ScriptExecutionsTest::BackgroundOperationRestart [GOOD] >> ScriptExecutionsTest::BackgroundOperationFinalization >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged+UseStreamIndex [GOOD] >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged-UseStreamIndex >> KqpQueryService::TableSink_OlapUpdate >> Yq_1::Basic_TaggedLiteral [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] Test command err: 2026-02-19T08:32:55.572134Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491508722179510:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.572188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:55.929833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:56.045840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:56.045948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.090789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.093230Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.096872Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491508722179470:2081] 1771489975570116 != 1771489975570119 2026-02-19T08:32:56.145784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.319314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.319410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.319417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.319492Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.586011Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.101438Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.109479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:57.132823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:59.082744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.084905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.087198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.263803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:59.311814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:59.317006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:59.323007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:59.323471Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:59.323908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:00.262609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491530197017216:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.262826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.263177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491530197017231:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.263232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491530197017232:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.263350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.268090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715669:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:00.279923Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491530197017236:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715669 completed, doublechecking } 2026-02-19T08:33:00.460820Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491530197017300:2813] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.680599Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491508722179510:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.681011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:00.768040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2026-02-19T08:33:00.784022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:00.962074Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491530197017567:2948] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/3391ca59-4cc8-dc62-8449-8d8b8408b32d\', error: path is temporary (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.972208Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=NGY2YTg0YTUtYjZlZjA4YjctZTdiZmYxNGUtOTQwNDU2OA==, ActorId: [1:7608491530197017293:2382], ActorState: ExecuteState, LegacyTraceId: 01khtgfwgca3g3pvjk94fnmebz, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Executing ESchemeOpCreateTable" issue_code: 2029 severity: 1 issues { message: "Check failed: path: \'/Root/.tmp/sessions/3391ca59-4cc8-dc62-8449-8d8b8408b32d\', error: path is temporary (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeDir, state: EPathStateNoChanges)" issue_code: 2029 severity: 1 } } trace_id# 2026-02-19T08:33:01.010978Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491530197017598:2431], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:01.013084Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://ses ... tes: 0, created_at: { plan_step: 1771489994337, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489994001, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997130, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997179, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489996955, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997004, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997053, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489994085, tx_id: 281474976715660 } } Scheme entry: { name: ReorderKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997235, tx_id: 281474976715668 } } Scheme entry: { name: ReorderOptionalKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997305, tx_id: 281474976715669 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997088, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489999244, tx_id: 281474976715673 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489993826, tx_id: 281474976715658 } } 2026-02-19T08:33:19.340233Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491613177620955:4036] txid# 281474976715676, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2026-02-19T08:33:19.340328Z node 4 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976715676, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2026-02-19T08:33:19.340630Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=ODBjYTk3OTEtYmQxYWVjOWQtYWJlMDAwNDQtNTVhZDNjMmE=, ActorId: [4:7608491613177620915:2553], ActorState: ExecuteState, LegacyTraceId: 01khtggeevfnwekpv5fe82wss8, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing ESchemeOpCreatePersQueueGroup" severity: 1 issues { message: "Scheme operation failed, status: ExecError, reason: Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: 
EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } } trace_id# Query failed, status: GENERIC_ERROR:
: Error: Executing ESchemeOpCreatePersQueueGroup
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1771489997361, tx_id: 281474976715670 } } Scheme entry: { name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1771489993182, tx_id: 281474976710657 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489994337, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489994001, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997130, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997179, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489996955, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997004, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997053, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489994085, tx_id: 281474976715660 } } Scheme entry: { name: ReorderKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997235, tx_id: 281474976715668 } } Scheme entry: { name: ReorderOptionalKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997305, tx_id: 281474976715669 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997088, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489999244, tx_id: 281474976715673 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489993826, tx_id: 281474976715658 } } 2026-02-19T08:33:19.387594Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491613177620975:4047] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:19.387686Z node 4 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976715678, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges) 2026-02-19T08:33:19.387991Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=ODBjYTk3OTEtYmQxYWVjOWQtYWJlMDAwNDQtNTVhZDNjMmE=, ActorId: [4:7608491613177620915:2553], ActorState: ExecuteState, LegacyTraceId: 01khtggeg66m6n57vd08jfy4wf, Create QueryResponse for error on request, msg: status# GENERIC_ERROR 
issues# { message: "Executing ESchemeOpDropPersQueueGroup" severity: 1 issues { message: "Scheme operation failed, status: ExecError, reason: Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } } trace_id# Query failed, status: GENERIC_ERROR:
: Error: Executing ESchemeOpDropPersQueueGroup
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges) 2026-02-19T08:33:19.413861Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491613177620990:4054] txid# 281474976715680, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:19.414201Z node 4 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976715680, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges) 2026-02-19T08:33:19.414520Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=ODBjYTk3OTEtYmQxYWVjOWQtYWJlMDAwNDQtNTVhZDNjMmE=, ActorId: [4:7608491613177620915:2553], ActorState: ExecuteState, LegacyTraceId: 01khtggeh4ctepk80q2vp3vczm, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing ESchemeOpDropPersQueueGroup" severity: 1 issues { message: "Scheme operation failed, status: ExecError, reason: Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } } trace_id# Query failed, status: GENERIC_ERROR:
: Error: Executing ESchemeOpDropPersQueueGroup
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeTable, state: EPathStateNoChanges) Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1771489997361, tx_id: 281474976715670 } } Scheme entry: { name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1771489993182, tx_id: 281474976710657 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489994337, tx_id: 281474976715661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489994001, tx_id: 281474976715659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997130, tx_id: 281474976715666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997179, tx_id: 281474976715667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489996955, tx_id: 281474976715662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997004, tx_id: 281474976715663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997053, tx_id: 281474976715664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489994085, tx_id: 281474976715660 } } Scheme entry: { name: ReorderKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997235, tx_id: 281474976715668 } } Scheme entry: { name: ReorderOptionalKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997305, tx_id: 281474976715669 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489997088, tx_id: 281474976715665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489999244, tx_id: 281474976715673 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771489993826, tx_id: 281474976715658 } } 2026-02-19T08:33:19.450236Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) >> KqpQueryService::ReadManyRanges [GOOD] >> KqpQueryService::ReadManyShardsRange ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2026-02-19T08:32:40.896095Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491444764566096:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:40.896139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.063720982 
598574 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.063826590 598574 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.240935Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.326542Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.326620Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.327802Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.330659Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.351965Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:20924 2026-02-19T08:32:41.352089Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.366802Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.367015Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.367114Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.376881Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.376951Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.390564Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.390828Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.403843Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.411022Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.424670Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.436703Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.436948Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.437390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.442961Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.499530Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.499623Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.499736Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.499908Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.526530Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.541075Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.542906Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.544385Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19T08:32:41.591038Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:20924: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:20924 } ] 2026-02-19 ... utputs have been finished. Consider finished 2026-02-19T08:33:19.194441Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491611072384820:2910], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:19.194459Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:33:19.194571Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491611072384820 RawX2: 4503616807242590 } } DstEndpoint { ActorId { RawX1: 7608491611072384821 RawX2: 4503616807242591 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491611072384821 RawX2: 4503616807242591 } } DstEndpoint { ActorId { RawX1: 7608491611072384816 RawX2: 4503616807242298 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-02-19T08:33:19.194587Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:19.194595Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:19.194609Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:19.194621Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:19.194632Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:19.198474Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:981: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. Recv TEvReadResult from ShardID=72075186224037891, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-02-19T08:33:19.198503Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1086: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. Taken 0 locks 2026-02-19T08:33:19.198518Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1101: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. new data for read #0 seqno = 1 finished = 1 2026-02-19T08:33:19.198541Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491611072384820:2910], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-02-19T08:33:19.198561Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491611072384820:2910], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:19.198583Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-02-19T08:33:19.198600Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1259: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. enter pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:19.198630Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1306: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. 
exit pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-02-19T08:33:19.198652Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1368: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. returned 1 rows; processed 1 rows 2026-02-19T08:33:19.198690Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1408: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. dropping batch for read #0 2026-02-19T08:33:19.198700Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. effective maxinflight 1024 sorted 0 2026-02-19T08:33:19.198712Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-02-19T08:33:19.198730Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715735, task: 1, CA Id [4:7608491611072384820:2910]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-02-19T08:33:19.198873Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491611072384820:2910], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:19.198905Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:19.199005Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:19.199106Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:19.199134Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491611072384821:2911], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. 
CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:19.199281Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715735;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:19.199467Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491611072384820:2910], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:19.199496Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491611072384820:2910], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggeapa7wtxpsrss8cm6ed. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzE3YzNmNC1kYTIzMmFmOS00M2U1YWFhOC04YmE0MTM2. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:19.199645Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715735;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> KqpQueryService::AlterCdcTopic [GOOD] >> KqpQueryService::SeveralCTAS-UseSink [GOOD] >> KqpQueryService::TableSink_OlapUpsert [GOOD] >> KqpQueryService::TableSink_OltpDelete >> KqpService::SessionBusyRetryOperationSync [GOOD] >> KqpQueryServiceScripts::TestPaging [GOOD] >> KqpQueryServiceScripts::TestFetchMoreThanLimit >> KqpQueryService::ExecStatsPlan [GOOD] >> KqpQueryService::ExecStatsAst >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] >> KqpService::RangeCache-UseCache [GOOD] >> KqpService::RetryAfterShutdownThenDisconnect >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD] Test command err: 2026-02-19T08:32:55.623645Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491512095559642:2237];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.623723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:56.064836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:56.105842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:56.105957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.233831Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491512095559433:2081] 1771489975575284 != 1771489975575287 2026-02-19T08:32:56.241678Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.243340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.257898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.438040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.438069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.438075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.438172Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.633832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.028646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.035166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:57.078229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.256161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.411400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.479576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.047636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491529275430498:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.047747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.048085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491529275430508:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.048133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.626749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.658991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.691221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.719656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.750036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.808582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.838984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.891823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.992212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491529275431389:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.992291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.992762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491529275431394:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.992846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491529275431395:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.992889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.996073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:00.009122Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491529275431398:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:00.111040Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491533570398747:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.623447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491512095559642:2237];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.623847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:01.481607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... SIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:15.970393Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:15.970472Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:16.483473Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:16.492093Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:16.510481Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.605255Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.724191Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:16.782624Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.851613Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:19.623260Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491613752840067:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:19.623367Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:19.623781Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491613752840077:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:19.623824Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:19.708553Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:19.762201Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:19.804952Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:19.852622Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:19.898573Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:19.947653Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:19.999178Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:20.085032Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:20.191253Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491618047808253:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.191376Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.191954Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491618047808258:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.192038Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491618047808259:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.192168Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.196610Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:20.207790Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491618047808262:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:20.285438Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491618047808313:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:20.702286Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491596572969052:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:20.702370Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:22.066924Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:22.248055Z node 4 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037927:1][4:7608491626637743409:2559] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:56:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-02-19T08:33:22.328355Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:33:22.387826Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491626637743513:4130] txid# 281474976715676, issues: { message: "Cannot change partition count. Use split/merge instead" severity: 1 } 2026-02-19T08:33:22.388551Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=ZTAxYWQ3MjEtMjU4Yjk4MjAtYzBkMjAwMzgtNTQyMzYxYWI=, ActorId: [4:7608491626637743461:2566], ActorState: ExecuteState, LegacyTraceId: 01khtgghdx7x9z773g5gb3gn68, Create QueryResponse for error on request, msg: status# BAD_REQUEST issues# { message: "Executing ESchemeOpAlterPersQueueGroup" issue_code: 2017 severity: 1 issues { message: "Cannot change partition count. Use split/merge instead" issue_code: 2017 severity: 1 } } trace_id# Query failed, status: BAD_REQUEST:
: Error: Executing ESchemeOpAlterPersQueueGroup, code: 2017
: Error: Cannot change partition count. Use split/merge instead, code: 2017 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS-UseSink [GOOD] Test command err: 2026-02-19T08:32:55.690029Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491510445865918:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.694880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:55.985804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:55.997156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:55.997240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.149127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.192481Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.195476Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491510445865863:2081] 1771489975673617 != 1771489975673620 2026-02-19T08:32:56.255663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.322169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.322195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.322203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.322259Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.696126Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.032895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.045236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:32:57.109414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.282487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.445702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.511485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.132418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527625736941:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.132522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.132769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527625736951:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.132805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.626607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.657768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.686269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.712423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.748388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.785324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.828342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.870731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.955862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527625737824:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.955940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.956101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527625737830:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.956143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.956179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527625737829:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.959847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:59.975469Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491527625737833:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:00.074172Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491531920705180:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.675914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491510445865918:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.675982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:01.663831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715 ... {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:14.483863Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491591572026447:3760] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:15.272332Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491574392154501:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:15.272407Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:16.244272Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.332899Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608491600161961419:2551], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:16.333364Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=NDYxMWI2OTEtZTUxOWIyOS0xZWRjYWYwYy01ODBlZWVmNg==, ActorId: [3:7608491600161961333:2536], ActorState: ExecuteState, LegacyTraceId: 01khtggbg43xsj8jwsv43d8y9g, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 17 } message: "At function: KiReadTable!" end_position { row: 2 column: 17 } severity: 1 issues { position { row: 2 column: 17 } message: "Cannot find table \'db.[/Root/.sys/show_create]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:16.381401Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608491600161961436:2555], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:16.381938Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=NDYxMWI2OTEtZTUxOWIyOS0xZWRjYWYwYy01ODBlZWVmNg==, ActorId: [3:7608491600161961333:2536], ActorState: ExecuteState, LegacyTraceId: 01khtggbhqar1fg71hvfgvtc6m, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 17 } message: "At function: KiReadTable!" end_position { row: 2 column: 17 } severity: 1 issues { position { row: 2 column: 17 } message: "Cannot find table \'db.[/Root/.sys/show_create]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:17.533304Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491604086799489:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:17.536459Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:17.547676Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:17.660295Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:17.661777Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608491604086799441:2081] 1771489997531821 != 1771489997531824 2026-02-19T08:33:17.669160Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:17.669250Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:17.689823Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:17.773787Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:17.790291Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:17.790316Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:17.790324Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:17.790400Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:18.363134Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:18.541895Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:21.230418Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491621266669488:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.230546Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.231337Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491621266669524:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.231389Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.231475Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491621266669523:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.235890Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:21.251351Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491621266669527:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:33:21.341556Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491621266669578:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:21.566740Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:21.748178Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:21.945276Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:22.226670Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:22.237223Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710671, at schemeshard: 72057594046644480 2026-02-19T08:33:22.242643Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:22.258081Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710672, at schemeshard: 72057594046644480 2026-02-19T08:33:22.533595Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491604086799489:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:22.533689Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] >> BasicUsage::CreateTopicWithCustomAdvancedMonitoringSettings >> KqpQueryService::ExecuteQueryPg [GOOD] >> KqpQueryService::ExecuteQueryPgTableSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SessionBusyRetryOperationSync [GOOD] Test command err: 2026-02-19T08:32:55.642453Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491511698700617:2198];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.642757Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:55.676608Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:32:56.035052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:56.035227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.058712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:56.082724Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491511698700446:2081] 1771489975602082 != 1771489975602085 2026-02-19T08:32:56.092765Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.093353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.284846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.322406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.322434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.322441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.322535Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.649926Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.076772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.083968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:57.136817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.319083Z node 
1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.479099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.567680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.143195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528878571511:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.143314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.143880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528878571521:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.143926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.626556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.653763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.681438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.724057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.762386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.816436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.848869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.900372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.982013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528878572387:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.982081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.982395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528878572392:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.982437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528878572393:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.982477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.986216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:59.997399Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491528878572396:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:00.071500Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491533173539743:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.630413Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491511698700617:2198];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.630506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... ION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTZkYjEzN2YtNjAwMGVjOTktNGNmNTU4YWUtY2Y1YjU3NzY=, ActorId: [3:7608491620491660814:2557], ActorState: ExecuteState, LegacyTraceId: 01khtggggq4mwjq7119ef0trzz, Reply query error, msg: Pending previous query completion proxy_request_id# 22 trace_id# 2026-02-19T08:33:21.456400Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTZkYjEzN2YtNjAwMGVjOTktNGNmNTU4YWUtY2Y1YjU3NzY=, ActorId: [3:7608491620491660814:2557], ActorState: ExecuteState, LegacyTraceId: 01khtggggq4mwjq7119ef0trzz, Reply query error, msg: Pending previous query completion proxy_request_id# 23 trace_id# 2026-02-19T08:33:21.456468Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTZkYjEzN2YtNjAwMGVjOTktNGNmNTU4YWUtY2Y1YjU3NzY=, ActorId: [3:7608491620491660814:2557], ActorState: ExecuteState, LegacyTraceId: 01khtggggq4mwjq7119ef0trzz, Reply query error, msg: Pending previous query completion proxy_request_id# 24 trace_id# 2026-02-19T08:33:21.458667Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTZkYjEzN2YtNjAwMGVjOTktNGNmNTU4YWUtY2Y1YjU3NzY=, ActorId: [3:7608491620491660814:2557], ActorState: ExecuteState, LegacyTraceId: 01khtggggq4mwjq7119ef0trzz, Reply query error, msg: Pending previous query completion proxy_request_id# 25 trace_id# 2026-02-19T08:33:21.598486Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YmFlMDI1N2QtYTczYjIwYzYtYTliNDA5NjItOTc1MjFmNTg=, ActorId: [3:7608491620491660849:2572], ActorState: ExecuteState, LegacyTraceId: 01khtgggnm6r8asyghe30zw9cm, Reply query error, msg: Pending previous query completion proxy_request_id# 28 trace_id# 2026-02-19T08:33:21.611516Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YmFlMDI1N2QtYTczYjIwYzYtYTliNDA5NjItOTc1MjFmNTg=, ActorId: [3:7608491620491660849:2572], ActorState: ExecuteState, LegacyTraceId: 01khtgggnm6r8asyghe30zw9cm, Reply query error, msg: Pending previous query completion proxy_request_id# 29 trace_id# 2026-02-19T08:33:21.611598Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YmFlMDI1N2QtYTczYjIwYzYtYTliNDA5NjItOTc1MjFmNTg=, ActorId: [3:7608491620491660849:2572], ActorState: ExecuteState, LegacyTraceId: 01khtgggnm6r8asyghe30zw9cm, Reply query error, msg: Pending previous query completion proxy_request_id# 30 trace_id# 2026-02-19T08:33:21.624056Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: 
ydb://session/3?node_id=3&id=YmFlMDI1N2QtYTczYjIwYzYtYTliNDA5NjItOTc1MjFmNTg=, ActorId: [3:7608491620491660849:2572], ActorState: ExecuteState, LegacyTraceId: 01khtgggnm6r8asyghe30zw9cm, Reply query error, msg: Pending previous query completion proxy_request_id# 31 trace_id# 2026-02-19T08:33:21.629217Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YmFlMDI1N2QtYTczYjIwYzYtYTliNDA5NjItOTc1MjFmNTg=, ActorId: [3:7608491620491660849:2572], ActorState: ExecuteState, LegacyTraceId: 01khtgggnm6r8asyghe30zw9cm, Reply query error, msg: Pending previous query completion proxy_request_id# 32 trace_id# 2026-02-19T08:33:21.631861Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YmFlMDI1N2QtYTczYjIwYzYtYTliNDA5NjItOTc1MjFmNTg=, ActorId: [3:7608491620491660849:2572], ActorState: ExecuteState, LegacyTraceId: 01khtgggnm6r8asyghe30zw9cm, Reply query error, msg: Pending previous query completion proxy_request_id# 33 trace_id# 2026-02-19T08:33:21.634467Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YmFlMDI1N2QtYTczYjIwYzYtYTliNDA5NjItOTc1MjFmNTg=, ActorId: [3:7608491620491660849:2572], ActorState: ExecuteState, LegacyTraceId: 01khtgggnm6r8asyghe30zw9cm, Reply query error, msg: Pending previous query completion proxy_request_id# 34 trace_id# 2026-02-19T08:33:21.781781Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=ZTM2MmU4MDktNTdjZDRiMGMtM2Q2ZjQ1NTEtM2ZkOTZiNjc=, ActorId: [3:7608491620491660884:2586], ActorState: ExecuteState, LegacyTraceId: 01khtgggvnd7jvr2xydyr7219b, Reply query error, msg: Pending previous query completion proxy_request_id# 37 trace_id# 2026-02-19T08:33:21.781874Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=ZTM2MmU4MDktNTdjZDRiMGMtM2Q2ZjQ1NTEtM2ZkOTZiNjc=, ActorId: [3:7608491620491660884:2586], ActorState: ExecuteState, LegacyTraceId: 01khtgggvnd7jvr2xydyr7219b, Reply query error, msg: Pending previous query completion proxy_request_id# 38 trace_id# 2026-02-19T08:33:21.784410Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=ZTM2MmU4MDktNTdjZDRiMGMtM2Q2ZjQ1NTEtM2ZkOTZiNjc=, ActorId: [3:7608491620491660884:2586], ActorState: ExecuteState, LegacyTraceId: 01khtgggvnd7jvr2xydyr7219b, Reply query error, msg: Pending previous query completion proxy_request_id# 39 trace_id# 2026-02-19T08:33:21.788008Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=ZTM2MmU4MDktNTdjZDRiMGMtM2Q2ZjQ1NTEtM2ZkOTZiNjc=, ActorId: [3:7608491620491660884:2586], ActorState: ExecuteState, LegacyTraceId: 01khtgggvnd7jvr2xydyr7219b, Reply query error, msg: Pending previous query completion proxy_request_id# 40 trace_id# 2026-02-19T08:33:21.788092Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=ZTM2MmU4MDktNTdjZDRiMGMtM2Q2ZjQ1NTEtM2ZkOTZiNjc=, ActorId: [3:7608491620491660884:2586], ActorState: ExecuteState, LegacyTraceId: 01khtgggvnd7jvr2xydyr7219b, Reply query error, msg: Pending previous query completion proxy_request_id# 41 trace_id# 2026-02-19T08:33:21.793597Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=ZTM2MmU4MDktNTdjZDRiMGMtM2Q2ZjQ1NTEtM2ZkOTZiNjc=, ActorId: [3:7608491620491660884:2586], ActorState: ExecuteState, LegacyTraceId: 
01khtgggvnd7jvr2xydyr7219b, Reply query error, msg: Pending previous query completion proxy_request_id# 42 trace_id# 2026-02-19T08:33:21.918214Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTYwNjE5NmItNWI2OGZjOS1jZmFhZjZmLTNjOGVkYzdk, ActorId: [3:7608491620491660918:2600], ActorState: ExecuteState, LegacyTraceId: 01khtgggzn4vng6btzr5c2ab2q, Reply query error, msg: Pending previous query completion proxy_request_id# 45 trace_id# 2026-02-19T08:33:21.918286Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTYwNjE5NmItNWI2OGZjOS1jZmFhZjZmLTNjOGVkYzdk, ActorId: [3:7608491620491660918:2600], ActorState: ExecuteState, LegacyTraceId: 01khtgggzn4vng6btzr5c2ab2q, Reply query error, msg: Pending previous query completion proxy_request_id# 46 trace_id# 2026-02-19T08:33:21.919821Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTYwNjE5NmItNWI2OGZjOS1jZmFhZjZmLTNjOGVkYzdk, ActorId: [3:7608491620491660918:2600], ActorState: ExecuteState, LegacyTraceId: 01khtgggzn4vng6btzr5c2ab2q, Reply query error, msg: Pending previous query completion proxy_request_id# 47 trace_id# 2026-02-19T08:33:21.921071Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTYwNjE5NmItNWI2OGZjOS1jZmFhZjZmLTNjOGVkYzdk, ActorId: [3:7608491620491660918:2600], ActorState: ExecuteState, LegacyTraceId: 01khtgggzn4vng6btzr5c2ab2q, Reply query error, msg: Pending previous query completion proxy_request_id# 48 trace_id# 2026-02-19T08:33:21.925633Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YTYwNjE5NmItNWI2OGZjOS1jZmFhZjZmLTNjOGVkYzdk, ActorId: [3:7608491620491660918:2600], ActorState: ExecuteState, LegacyTraceId: 01khtgggzn4vng6btzr5c2ab2q, Reply query error, msg: Pending previous query completion proxy_request_id# 49 trace_id# 2026-02-19T08:33:22.045490Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=MjkzOWM0MGItZTU1ZjNmZWQtOTI3MGNmMmEtMjIyZjJkOTc=, ActorId: [3:7608491624786628245:2612], ActorState: ExecuteState, LegacyTraceId: 01khtggh3wdcs6qk6b79fv4c64, Reply query error, msg: Pending previous query completion proxy_request_id# 52 trace_id# 2026-02-19T08:33:22.048956Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=MjkzOWM0MGItZTU1ZjNmZWQtOTI3MGNmMmEtMjIyZjJkOTc=, ActorId: [3:7608491624786628245:2612], ActorState: ExecuteState, LegacyTraceId: 01khtggh3wdcs6qk6b79fv4c64, Reply query error, msg: Pending previous query completion proxy_request_id# 53 trace_id# 2026-02-19T08:33:22.050369Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=MjkzOWM0MGItZTU1ZjNmZWQtOTI3MGNmMmEtMjIyZjJkOTc=, ActorId: [3:7608491624786628245:2612], ActorState: ExecuteState, LegacyTraceId: 01khtggh3wdcs6qk6b79fv4c64, Reply query error, msg: Pending previous query completion proxy_request_id# 54 trace_id# 2026-02-19T08:33:22.052347Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=MjkzOWM0MGItZTU1ZjNmZWQtOTI3MGNmMmEtMjIyZjJkOTc=, ActorId: [3:7608491624786628245:2612], ActorState: ExecuteState, LegacyTraceId: 01khtggh3wdcs6qk6b79fv4c64, Reply query error, msg: Pending previous query completion proxy_request_id# 55 trace_id# 2026-02-19T08:33:22.190280Z node 3 :KQP_SESSION WARN: 
{KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YzdlMmJmN2MtMWNlYjY0NGUtYzk5YTQzZGQtYmI0MmY3OTI=, ActorId: [3:7608491624786628276:2625], ActorState: ExecuteState, LegacyTraceId: 01khtggh844cbxkdyag24j65be, Reply query error, msg: Pending previous query completion proxy_request_id# 58 trace_id# 2026-02-19T08:33:22.190356Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YzdlMmJmN2MtMWNlYjY0NGUtYzk5YTQzZGQtYmI0MmY3OTI=, ActorId: [3:7608491624786628276:2625], ActorState: ExecuteState, LegacyTraceId: 01khtggh844cbxkdyag24j65be, Reply query error, msg: Pending previous query completion proxy_request_id# 59 trace_id# 2026-02-19T08:33:22.190401Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YzdlMmJmN2MtMWNlYjY0NGUtYzk5YTQzZGQtYmI0MmY3OTI=, ActorId: [3:7608491624786628276:2625], ActorState: ExecuteState, LegacyTraceId: 01khtggh844cbxkdyag24j65be, Reply query error, msg: Pending previous query completion proxy_request_id# 60 trace_id# 2026-02-19T08:33:22.385485Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YmFmNzM1OGEtZGU0NjZjZDktZDYzMTBhZmMtZjM1YzJjZjc=, ActorId: [3:7608491624786628303:2635], ActorState: ExecuteState, LegacyTraceId: 01khtggheaa7yj1649txjgj5jt, Reply query error, msg: Pending previous query completion proxy_request_id# 63 trace_id# 2026-02-19T08:33:22.385547Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=YmFmNzM1OGEtZGU0NjZjZDktZDYzMTBhZmMtZjM1YzJjZjc=, ActorId: [3:7608491624786628303:2635], ActorState: ExecuteState, LegacyTraceId: 01khtggheaa7yj1649txjgj5jt, Reply query error, msg: Pending previous query completion proxy_request_id# 64 trace_id# 2026-02-19T08:33:22.505896Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=3&id=MjE5MmYwZjctYzNlNWJmODEtNTU2ZGNjMjEtMTg0ZTBkMDY=, ActorId: [3:7608491624786628327:2644], ActorState: ExecuteState, LegacyTraceId: 01khtgghj865nkyk87zwjvfv80, Reply query error, msg: Pending previous query completion proxy_request_id# 67 trace_id# >> KqpQueryService::Write >> Yq_1::DescribeQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] Test command err: 2026-02-19T08:33:04.203654Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491550095328313:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:04.203704Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:04.272838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:04.632726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:04.659866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:04.659975Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:04.709850Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491550095328194:2081] 1771489984134821 != 1771489984134824 2026-02-19T08:33:04.720019Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:04.727318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:04.998655Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:05.029505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:05.029532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:05.029538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:05.029617Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:05.253873Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:05.668107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:05.682685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:07.871575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491562980230983:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.871650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491562980230994:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.871696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.873903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491562980230998:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.873996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.875220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:07.885353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491562980230997:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:33:08.052245Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491562980231050:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:08.362827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:08.522618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:33:08.605379Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:33:08.628824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:08.946068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:33:09.084263Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-02-19T08:33:09.111205Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491571570166156:2407], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:09.111672Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZTMwM2I0Y2QtMmU4ZDAzNTUtMmY0NDIxYTMtMWVjZjI0YTg=, ActorId: [1:7608491571570166154:2406], ActorState: ExecuteState, LegacyTraceId: 01khtgg4ewdq6yspa1n791qy9k, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 21 } message: "At function: KiReadTable!" end_position { row: 3 column: 21 } severity: 1 issues { position { row: 3 column: 21 } message: "Cannot find table \'db.[/Root/Temp]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:09.130540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:09.146643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:09.168783Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491571570166206:2415], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:09.170561Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZjhlNWI4ZTUtYjBlYWZiMzMtNTRjMTRhMTItOGE3Y2Q3NmQ=, ActorId: [1:7608491571570166204:2414], ActorState: ExecuteState, LegacyTraceId: 01khtgg4gz44d344k0m4hpk1jr, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 17 } message: "At function: KiReadTable!" end_position { row: 3 column: 17 } severity: 1 issues { position { row: 3 column: 17 } message: "Cannot find table \'db.[/Root/Temp]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:09.203602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491550095328313:2148];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:09.203682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:09.942586Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491568417901807:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:09.943078Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:10.018148Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [Sc ... 910314Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491603900066848:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:17.910423Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:17.910736Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:17.931876Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491603900066847:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:33:18.002537Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491603900066900:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:18.040863Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:18.285915Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491608195034354:2620] txid# 281474976710662, issues: { message: "CreateSetConstraintInitiate is not implemented. TablePath = \'/Root/test/alterNotNull\'" severity: 1 } 2026-02-19T08:33:18.286861Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608491608195034346:2354], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:30: Error: Execution of node: KiAlterTable!
: Error: CreateSetConstraintInitiate is not implemented. TablePath = '/Root/test/alterNotNull', code: 2029 2026-02-19T08:33:18.289527Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=NzljNDhlNy04Y2QxYTM2ZC02MWIzOTQ0ZC0zMjhkMTI1MQ==, ActorId: [3:7608491608195034344:2353], ActorState: ExecuteState, LegacyTraceId: 01khtggddh73vxgz8n3n7hcy2j, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 30 } message: "Execution of node: KiAlterTable!" end_position { row: 3 column: 30 } severity: 1 issues { message: "CreateSetConstraintInitiate is not implemented. TablePath = \'/Root/test/alterNotNull\'" issue_code: 2029 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:19.179123Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491611571411236:2146];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:19.179165Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:19.259820Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:19.357931Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608491611571411128:2081] 1771489999166550 != 1771489999166553 2026-02-19T08:33:19.376518Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:19.385376Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:19.385467Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:19.393526Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:19.526397Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:19.526422Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:19.526430Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:19.526529Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:19.533812Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:20.119020Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:20.126672Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:20.223180Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:23.106204Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491628751281181:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:23.106214Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491628751281200:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:23.106332Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:23.106724Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491628751281218:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:23.106783Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:23.110782Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:23.126971Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-02-19T08:33:23.127453Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491628751281217:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:33:23.200977Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491628751281270:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:23.302417Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:23.582896Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491628751281424:2355], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:33:23.585318Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=OGJkMzc2YmUtZTQ4ODVjYmQtMzAwNmRiMWEtYzkzMDBjNmI=, ActorId: [4:7608491628751281422:2354], ActorState: ExecuteState, LegacyTraceId: 01khtggjk8622cyv63tayqe43z, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 30 } message: "At function: KiWriteTable!" end_position { row: 2 column: 30 } severity: 1 issues { position { row: 2 column: 84 } message: "Failed to convert type: Struct<\'id\':Int32,\'val1\':Null,\'val2\':Int32> to Struct<\'id\':Int32,\'val1\':Int32,\'val2\':Int32?>" end_position { row: 2 column: 84 } severity: 1 issues { position { row: 2 column: 84 } message: "Failed to convert \'val1\': Null to Int32" end_position { row: 2 column: 84 } severity: 1 } } issues { position { row: 2 column: 84 } message: "Failed to convert input columns types to scheme types" end_position { row: 2 column: 84 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:23.624160Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-02-19T08:33:23.673831Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) >> KqpQueryService::Ddl [GOOD] >> KqpQueryService::DdlColumnTable >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2026-02-19T08:32:41.022571Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491451152698970:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:41.022895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.169624165 598749 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.169793207 598749 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.361179Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.361487Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.469233Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.469335Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.469412Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.469467Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.469510Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.469556Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.569862Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.573880Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.573948Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.577259Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.577308Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.577351Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.577432Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.577489Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.577513Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.577832Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25804 2026-02-19T08:32:41.577905Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.579064Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.582332Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.645184Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.649810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.664663Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.672935Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.674450Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.730651Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.741735Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.760730Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026-02-19T08:32:41.766736Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25804: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25804 } ] 2026- ... 707740Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491626000412404:2895], TxId: 281474976715733, task: 1. Ctx: { TraceId : 01khtggh923f4gcv20ysp459ye. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:22.707760Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:33:22.707876Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491626000412404 RawX2: 4503616807242575 } } DstEndpoint { ActorId { RawX1: 7608491626000412405 RawX2: 4503616807242576 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491626000412405 RawX2: 4503616807242576 } } DstEndpoint { ActorId { RawX1: 7608491626000412400 RawX2: 4503616807242216 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-02-19T08:33:22.707895Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:22.707903Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:22.707918Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:22.707931Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:22.707940Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:22.709357Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:981: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. Recv TEvReadResult from ShardID=72075186224037899, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-02-19T08:33:22.709387Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1086: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. Taken 0 locks 2026-02-19T08:33:22.709400Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1101: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. new data for read #0 seqno = 1 finished = 1 2026-02-19T08:33:22.709435Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491626000412404:2895], TxId: 281474976715733, task: 1. Ctx: { TraceId : 01khtggh923f4gcv20ysp459ye. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-02-19T08:33:22.709453Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491626000412404:2895], TxId: 281474976715733, task: 1. Ctx: { TraceId : 01khtggh923f4gcv20ysp459ye. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:22.709473Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-02-19T08:33:22.709492Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1259: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. enter pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:22.709506Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1306: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. 
exit pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:22.709520Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1368: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. returned 0 rows; processed 0 rows 2026-02-19T08:33:22.709568Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1408: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. dropping batch for read #0 2026-02-19T08:33:22.709579Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. effective maxinflight 1 sorted 1 2026-02-19T08:33:22.709592Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-02-19T08:33:22.709607Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715733, task: 1, CA Id [4:7608491626000412404:2895]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-02-19T08:33:22.709672Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491626000412404:2895], TxId: 281474976715733, task: 1. Ctx: { TraceId : 01khtggh923f4gcv20ysp459ye. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:22.709722Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:22.709770Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:22.709792Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491626000412404:2895], TxId: 281474976715733, task: 1. Ctx: { TraceId : 01khtggh923f4gcv20ysp459ye. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:22.709810Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491626000412404:2895], TxId: 281474976715733, task: 1. Ctx: { TraceId : 01khtggh923f4gcv20ysp459ye. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:22.709991Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715733;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:22.710209Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:22.710239Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491626000412405:2896], TxId: 281474976715733, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggh923f4gcv20ysp459ye. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzNiNzI3MGQtZWYyYzdhMjItODQwOWJiNGUtYzAxNGVjZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:22.710324Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715733;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> KqpQueryService::Tcl [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone >> KqpQueryService::TableSink_HtapComplex+withOltpSink >> KqpQueryService::CloseConnection [GOOD] >> KqpQueryService::CreateAndAlterTopic >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict >> KqpNamedExpressions::NamedExpressionRandomUpsertRevert-UseSink-UseDataQuery [GOOD] >> KqpNamedExpressions::NamedExpressionRandomUpsertRevert+UseSink-UseDataQuery >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2026-02-19T08:32:40.897465Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491443712638195:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:40.898134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.043491517 598553 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.043649604 598553 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.247455Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.269952Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.322845Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.323158Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.323211Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.323838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.324808Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.386611Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.394953Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:11297 2026-02-19T08:32:41.396184Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.402531Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.403329Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.417004Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.417113Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.421462Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.430562Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.444069Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.447503Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.453231Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.465523Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.472230Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.472302Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.481501Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.489894Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.524755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.533060Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.536683Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.552751Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.553801Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.557259Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:11297: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11297 } ] 2026-02-19T08:32:41.557339Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks" ... Q=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:23.832893Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491628340714609:2961], TxId: 281474976715743, task: 1. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:23.832984Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491628340714609 RawX2: 4503616807242641 } } DstEndpoint { ActorId { RawX1: 7608491628340714610 RawX2: 4503616807242642 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491628340714610 RawX2: 4503616807242642 } } DstEndpoint { ActorId { RawX1: 7608491628340714605 RawX2: 4503616807242026 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-02-19T08:33:23.833002Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:23.833011Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:23.833024Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 271646922 2026-02-19T08:33:23.833038Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:23.833046Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:23.834340Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:981: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. Recv TEvReadResult from ShardID=72075186224037891, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-02-19T08:33:23.834361Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1086: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. Taken 0 locks 2026-02-19T08:33:23.834372Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1101: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. new data for read #0 seqno = 1 finished = 1 2026-02-19T08:33:23.834395Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491628340714609:2961], TxId: 281474976715743, task: 1. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-02-19T08:33:23.834414Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491628340714609:2961], TxId: 281474976715743, task: 1. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:23.834429Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-02-19T08:33:23.834446Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1259: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. enter pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:23.834471Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1306: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. exit pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 1 freeSpace: 8386351 2026-02-19T08:33:23.834491Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1368: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. 
returned 1 rows; processed 1 rows 2026-02-19T08:33:23.834531Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1408: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. dropping batch for read #0 2026-02-19T08:33:23.834541Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. effective maxinflight 1024 sorted 0 2026-02-19T08:33:23.834553Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-02-19T08:33:23.834567Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715743, task: 1, CA Id [4:7608491628340714609:2961]. returned async data processed rows 1 left freeSpace 8386351 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-02-19T08:33:23.834671Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491628340714609:2961], TxId: 281474976715743, task: 1. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:23.834700Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:23.834778Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:23.834830Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:23.834845Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491628340714610:2962], TxId: 281474976715743, task: 2. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. 
All channels and sinks finished 2026-02-19T08:33:23.834958Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715743;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:23.835111Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491628340714609:2961], TxId: 281474976715743, task: 1. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:23.835131Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491628340714609:2961], TxId: 281474976715743, task: 1. Ctx: { TraceId : 01khtggjgp7bk0r94s9gga7b73. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZDQyMTkyNjItMzJkYThhNjEtOWUyMWRiMmItYTBhNzNjOWQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:23.835265Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715743;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:24.023417Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:14659: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:14659 >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpDocumentApi::RestrictDrop >> KqpQueryServiceScripts::ListScriptExecutions [GOOD] >> KqpQueryServiceScripts::Tcl >> KqpQueryService::ReadManyShardsRange [GOOD] >> KqpQueryService::ReadManyRangesAndPoints >> KqpQueryService::TableSink_Htap+withOltpSink >> KqpQueryService::Ddl_Dml [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsProfile [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> KqpQueryService::TableSink_OlapOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] Test command err: 2026-02-19T08:32:43.686583Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491457716767669:2144];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:43.689756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:43.950995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:43.951102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:43.988465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:44.043594Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491457716767557:2081] 1771489963670882 != 1771489963670885 2026-02-19T08:32:44.099804Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:44.466229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:44.466253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:44.466270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:44.466347Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:44.692727Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:44.701608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:46.702643Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.709327Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:32:46.709422Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 
2026-02-19T08:32:46.709438Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.715827Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.715847Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Creating table 2026-02-19T08:32:46.715891Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:46.716026Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7608491470601670283:2489] Owner: [1:7608491470601670280:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.716032Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7608491470601670283:2489] Owner: [1:7608491470601670280:2487]. Creating table 2026-02-19T08:32:46.716044Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608491470601670283:2489] Owner: [1:7608491470601670280:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:32:46.716069Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.716074Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Creating table 2026-02-19T08:32:46.716083Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:32:46.726592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.728699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.734581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.743250Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-02-19T08:32:46.743309Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Subscribe on create table tx: 281474976710658 2026-02-19T08:32:46.743387Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-02-19T08:32:46.743402Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Subscribe on create table tx: 281474976710660 2026-02-19T08:32:46.744055Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608491470601670283:2489] Owner: [1:7608491470601670280:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-02-19T08:32:46.744100Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7608491470601670283:2489] Owner: [1:7608491470601670280:2487]. Subscribe on create table tx: 281474976710659 2026-02-19T08:32:46.747626Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Subscribe on tx: 281474976710658 registered 2026-02-19T08:32:46.747639Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Subscribe on tx: 281474976710660 registered 2026-02-19T08:32:46.747652Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7608491470601670283:2489] Owner: [1:7608491470601670280:2487]. Subscribe on tx: 281474976710659 registered 2026-02-19T08:32:46.870985Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-02-19T08:32:46.900621Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-02-19T08:32:46.908245Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7608491470601670283:2489] Owner: [1:7608491470601670280:2487]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-02-19T08:32:46.963460Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Table already exists, number of columns: 7, has SecurityObject: true 2026-02-19T08:32:46.963512Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Column diff is empty, finishing 2026-02-19T08:32:46.964329Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. 
SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Table already exists, number of columns: 34, has SecurityObject: true 2026-02-19T08:32:46.964387Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Column diff is empty, finishing 2026-02-19T08:32:46.964484Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:46.964496Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:32:46.965447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:46.966070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:46.966686Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710662 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:32:46.966715Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7608491470601670284:2490] Owner: [1:7608491470601670280:2487]. Successful alter request: ExecComplete 2026-02-19T08:32:46.966871Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608491470601670282:2488] Owner: [1:7608491470601670280:2487]. TEvProposeTransactionStatus: { Status: 48 ... OM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-02-19T08:33:24.962137Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=NTU2ZGFmZC0zZmU4YTBmNC04YjIzM2RlMy02YjUzZjhh, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 89, targetId: [4:7608491635239828522:2701] 2026-02-19T08:33:24.962165Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 89 timeout: 300.000000s actor id: [4:7608491635239828524:3208] 2026-02-19T08:33:24.969059Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 89, sender: [4:7608491635239828523:2702], selfId: [4:7608491587995186139:2247], source: [4:7608491635239828522:2701] 2026-02-19T08:33:24.969421Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [4:7608491635239828519:3206], ActorId: [4:7608491635239828520:3207], TraceId: ExecutionId: d486acef-5f26e380-46692d01-eee4a4bf, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NTU2ZGFmZC0zZmU4YTBmNC04YjIzM2RlMy02YjUzZjhh, TxId: 2026-02-19T08:33:24.970146Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [4:7608491635239828519:3206], ActorId: [4:7608491635239828520:3207], TraceId: ExecutionId: d486acef-5f26e380-46692d01-eee4a4bf, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NTU2ZGFmZC0zZmU4YTBmNC04YjIzM2RlMy02YjUzZjhh, TxId: 2026-02-19T08:33:24.970182Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2379: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [4:7608491635239828519:3206], ActorId: [4:7608491635239828520:3207], TraceId: ExecutionId: d486acef-5f26e380-46692d01-eee4a4bf, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-02-19T08:33:24.970249Z node 4 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [4:7608491635239828518:3205], ActorId: [4:7608491635239828519:3206], TraceId: ExecutionId: d486acef-5f26e380-46692d01-eee4a4bf, RequestDatabase: /dc-1, Got response [4:7608491635239828520:3207] SUCCESS 2026-02-19T08:33:24.970293Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2492: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [4:7608491635239828517:3204] ActorId: [4:7608491635239828518:3205] Database: /dc-1 ExecutionId: d486acef-5f26e380-46692d01-eee4a4bf. Extracted script execution operation [4:7608491635239828520:3207], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [4:7608491635239828326:3136], LeaseGeneration: 0 2026-02-19T08:33:24.970312Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2518: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [4:7608491635239828517:3204] ActorId: [4:7608491635239828518:3205] Database: /dc-1 ExecutionId: d486acef-5f26e380-46692d01-eee4a4bf. Reply success 2026-02-19T08:33:24.970630Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=4&id=NTU2ZGFmZC0zZmU4YTBmNC04YjIzM2RlMy02YjUzZjhh, workerId: [4:7608491635239828522:2701], local sessions count: 1 2026-02-19T08:33:25.988421Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtggmz3d9p7y8w59a5pz5cy, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=ZmU1NDdmZTktYWI5ZWIwNGEtYTkzZWU1MDUtYmE5OTNlYmE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 90, targetId: [4:7608491622354925984:2534] 2026-02-19T08:33:25.988478Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 90 timeout: 300.000000s actor id: [4:7608491639534795845:3216] 2026-02-19T08:33:26.004378Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7608491643829763145:3218], for# root@builtin, access# DescribeSchema 2026-02-19T08:33:26.004424Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7608491643829763145:3218], for# root@builtin, access# DescribeSchema 2026-02-19T08:33:26.006830Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491639534795846:2712], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:26.009548Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=ZmU1NDdmZTktYWI5ZWIwNGEtYTkzZWU1MDUtYmE5OTNlYmE=, ActorId: [4:7608491622354925984:2534], ActorState: ExecuteState, LegacyTraceId: 01khtggmz3d9p7y8w59a5pz5cy, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:26.009909Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtggmz3d9p7y8w59a5pz5cy", Forwarded response to sender actor, requestId: 90, sender: [4:7608491639534795844:2711], selfId: [4:7608491587995186139:2247], source: [4:7608491622354925984:2534] 2026-02-19T08:33:26.021986Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtggn053yv9vbjg1bqf0ny8, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=ZmU1NDdmZTktYWI5ZWIwNGEtYTkzZWU1MDUtYmE5OTNlYmE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 91, targetId: [4:7608491622354925984:2534] 2026-02-19T08:33:26.022036Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 91 timeout: 300.000000s actor id: [4:7608491643829763148:3219] 2026-02-19T08:33:26.043244Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7608491643829763152:3221], for# root@builtin, access# DescribeSchema 2026-02-19T08:33:26.043280Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7608491643829763152:3221], for# root@builtin, access# DescribeSchema 2026-02-19T08:33:26.045679Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491643829763149:2715], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_execution_leases]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:26.046139Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=ZmU1NDdmZTktYWI5ZWIwNGEtYTkzZWU1MDUtYmE5OTNlYmE=, ActorId: [4:7608491622354925984:2534], ActorState: ExecuteState, LegacyTraceId: 01khtggn053yv9vbjg1bqf0ny8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_execution_leases]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:26.046324Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtggn053yv9vbjg1bqf0ny8", Forwarded response to sender actor, requestId: 91, sender: [4:7608491643829763147:2714], selfId: [4:7608491587995186139:2247], source: [4:7608491622354925984:2534] 2026-02-19T08:33:26.063713Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtggn1f1kgg6t33ycenv13v, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=ZmU1NDdmZTktYWI5ZWIwNGEtYTkzZWU1MDUtYmE5OTNlYmE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 92, targetId: [4:7608491622354925984:2534] 2026-02-19T08:33:26.063754Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 92 timeout: 300.000000s actor id: [4:7608491643829763155:3222] 2026-02-19T08:33:26.082157Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7608491643829763159:3224], for# root@builtin, access# DescribeSchema 2026-02-19T08:33:26.082190Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7608491643829763159:3224], for# root@builtin, access# DescribeSchema 2026-02-19T08:33:26.084627Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491643829763156:2718], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/result_sets]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:26.085133Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=ZmU1NDdmZTktYWI5ZWIwNGEtYTkzZWU1MDUtYmE5OTNlYmE=, ActorId: [4:7608491622354925984:2534], ActorState: ExecuteState, LegacyTraceId: 01khtggn1f1kgg6t33ycenv13v, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/result_sets]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:26.085280Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtggn1f1kgg6t33ycenv13v", Forwarded response to sender actor, requestId: 92, sender: [4:7608491643829763154:2717], selfId: [4:7608491587995186139:2247], source: [4:7608491622354925984:2534] 2026-02-19T08:33:26.099453Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=4&id=ZmU1NDdmZTktYWI5ZWIwNGEtYTkzZWU1MDUtYmE5OTNlYmE=, workerId: [4:7608491622354925984:2534], local sessions count: 0 >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> KqpQueryService::TableSink_OltpDelete [GOOD] >> KqpQueryService::ExecuteCollectMeta >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> KqpQueryService::ExecuteQueryPgTableSelect [GOOD] >> KqpQueryService::ExecuteQueryMultiScalar >> KqpQueryService::IssuesInCaseOfSuccess [GOOD] >> KqpQueryService::MaterializeTxResults ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Ddl_Dml [GOOD] Test command err: 2026-02-19T08:32:55.628699Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491510405591110:2225];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.629003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:55.945824Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:56.010034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:56.010159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.020965Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.050922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.145010Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.321062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.321100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.321110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.321188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.621950Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.123639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.130270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:57.194626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.320600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.474202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.551177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.242821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527585461962:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.242946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.243297Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527585461972:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.243361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.626390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.653403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.691113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.727327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.785722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.818753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.854528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.903286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.996913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527585462843:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.997014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.997220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527585462848:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.997258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491527585462849:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.997323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.000804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:00.013680Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491527585462852:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:00.086763Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491531880430199:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.626532Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491510405591110:2225];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.626585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:01.695877Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491536175397817:3985] txid# 281474976710674, issues: { message: "User already exists" severity: 1 } 2026-02-19T08:33:01.706703Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=1&id=YmU3MDI5MDEtNjUxNDJjYTctOTgxYmQ2MzEtNGNmYjQ1NjA=, ActorId: [1:7608491536175397809:2542], ActorState: Exec ... us# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 11 column: 17 } message: "At function: KiReadTable!" end_position { row: 11 column: 17 } severity: 1 issues { position { row: 11 column: 17 } message: "Cannot find table \'db.[/Root/TestDdlDml5]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 11 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:24.520390Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:24.647859Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:25.176351Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491638762664335:4373] txid# 281474976715699, issues: { message: "Check failed: path: \'/Root/TestDdl1\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:25.176624Z node 4 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976715699, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl1', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeTable, state: EPathStateNoChanges) 2026-02-19T08:33:25.176925Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=ZjAxYjVhNmUtNzBiZGEyNmUtZjQzMjQ1MzMtY2Q5NjJlYjc=, ActorId: [4:7608491638762664322:2687], ActorState: ExecuteState, LegacyTraceId: 01khtggm4t4nbbcgw33p8xpdcp, Create 
QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing ESchemeOpCreateTable" severity: 1 issues { message: "Scheme operation failed, status: ExecComplete, reason: Check failed: path: \'/Root/TestDdl1\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } } trace_id# 2026-02-19T08:33:25.220122Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491638762664359:4384] txid# 281474976715701, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 58], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:25.220220Z node 4 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976715701, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 58], type: EPathTypeTable, state: EPathStateNoChanges) 2026-02-19T08:33:25.220532Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=MWQzZTJjN2YtNjFmYTY1MTItNDBiNmExZDAtOTBmZjk0Zg==, ActorId: [4:7608491638762664346:2694], ActorState: ExecuteState, LegacyTraceId: 01khtggm693kvnsesbz0esp2cc, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing ESchemeOpCreateTable" severity: 1 issues { message: "Scheme operation failed, status: ExecComplete, reason: Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 58], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } } trace_id# 2026-02-19T08:33:25.594763Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:25.719630Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491638762664531:4478] txid# 281474976715706, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 58], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:25.719901Z node 4 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976715706, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 58], type: EPathTypeTable, state: EPathStateNoChanges) 2026-02-19T08:33:25.720216Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=Yzg0Mzc1OTYtMjhlNTdhMjItZWE0NDQ4MTQtYjQ0NzE2MWE=, ActorId: [4:7608491638762664425:2719], ActorState: ExecuteState, LegacyTraceId: 01khtggmfsbkx7j076dm0jq665, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing ESchemeOpCreateTable" severity: 1 issues { message: "Scheme operation failed, status: ExecComplete, reason: Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 58], type: EPathTypeTable, state: EPathStateNoChanges)" 
severity: 1 } } trace_id# 2026-02-19T08:33:25.912460Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491638762664584:2758], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl4]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:25.912852Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=YWVjM2Y2OGYtMzQzYjM0N2MtYTBkZWEyNTctYTFlYTVkNzM=, ActorId: [4:7608491638762664581:2756], ActorState: ExecuteState, LegacyTraceId: 01khtggmvv052pnjgkhqzzt4fs, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 17 } message: "At function: KiReadTable!" end_position { row: 2 column: 17 } severity: 1 issues { position { row: 2 column: 17 } message: "Cannot find table \'db.[/Root/TestDdl4]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:26.136570Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:26.841801Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:506: Get parsing result with error, self: [4:7608491643057632109:2809], owner: [4:7608491617287825795:2389], statement id: 1 2026-02-19T08:33:26.842288Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=NTNiN2IyNDAtOWZmYzU4MjEtYTQ4ODY0ZWItMzFlZmE2ZGQ=, ActorId: [4:7608491643057632107:2808], ActorState: ExecuteState, LegacyTraceId: 01khtggnscbf819gvtg8hgdyjg, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Error while parsing query." severity: 1 issues { position { row: 3 column: 31 } message: "Unknown name: $a" end_position { row: 3 column: 31 } severity: 1 } } tx_id# trace_id# 2026-02-19T08:33:27.084779Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491647352599442:2825], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:3:44: Error: Failed to convert 'Value': String to Optional
:3:44: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:33:27.085897Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=ZWZhNzIxNmMtYzU4MDA3Ny0xOTc3NmY3ZS0xZjk4NzdiYg==, ActorId: [4:7608491643057632132:2819], ActorState: ExecuteState, LegacyTraceId: 01khtggnx70vp904dbfa7y1ga3, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 29 } message: "At function: KiWriteTable!" end_position { row: 3 column: 29 } severity: 1 issues { position { row: 3 column: 44 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value\':String> to Struct<\'Key\':Uint64?,\'Value\':Uint64?>" end_position { row: 3 column: 44 } severity: 1 issues { position { row: 3 column: 44 } message: "Failed to convert \'Value\': String to Optional" end_position { row: 3 column: 44 } severity: 1 } } issues { position { row: 3 column: 44 } message: "Failed to convert input columns types to scheme types" end_position { row: 3 column: 44 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:27.170971Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:27.279291Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491647352599554:2847], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:8:29: Error: At function: KiWriteTable!
:8:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:8:44: Error: Failed to convert 'Value': String to Optional
:8:44: Error: Failed to convert input columns types to scheme types, code: 2031 2026-02-19T08:33:27.279723Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=ZmNiNGMxODgtYzFmNWY0Mi05ZmQ5NWJmMi1lZDMzZjUxMQ==, ActorId: [4:7608491647352599466:2834], ActorState: ExecuteState, LegacyTraceId: 01khtggp2s6zvbed8rqy45j4am, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 8 column: 29 } message: "At function: KiWriteTable!" end_position { row: 8 column: 29 } severity: 1 issues { position { row: 8 column: 44 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value\':String> to Struct<\'Key\':Uint64?,\'Value\':Uint64?>" end_position { row: 8 column: 44 } severity: 1 issues { position { row: 8 column: 44 } message: "Failed to convert \'Value\': String to Optional" end_position { row: 8 column: 44 } severity: 1 } } issues { position { row: 8 column: 44 } message: "Failed to convert input columns types to scheme types" end_position { row: 8 column: 44 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# >> KqpQueryService::Followers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpDelete [GOOD] Test command err: 2026-02-19T08:33:10.867414Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491576440228488:2146];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:10.867457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:10.932626Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.006334s 2026-02-19T08:33:11.238121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:11.264435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:11.264540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:11.357476Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:11.361834Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491576440228379:2081] 1771489990841384 != 1771489990841387 2026-02-19T08:33:11.375286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:11.597287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:11.698371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:11.698396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:11.698414Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:11.698499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:11.920783Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:12.196687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:12.213622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:14.591931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491593620098467:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.592069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.592366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491593620098477:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.592414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.886002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:33:15.037192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:15.037203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:15.037436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:15.037702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:15.037832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:15.037835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:15.037954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:15.038041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:15.038088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:15.038165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:15.038208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:15.038304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:15.038340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:15.038404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:15.038445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:15.038502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:15.038537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:15.038619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:15.038670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:15.038725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:15.038799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:15.038814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:15.038937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:15.038944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7608491593620098615:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:15.039073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:15.039226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491593620098614:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register ... les_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:21.396639Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:21.396656Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:21.422649Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491622909935502:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.422726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.422791Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491622909935507:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.423002Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491622909935509:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.423038Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:21.426148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:21.441826Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491622909935510:2394], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:33:21.524408Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491622909935562:2771] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:22.247752Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [2:7608491627204903149:2557], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2026-02-19T08:33:22.249183Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=2&id=OGU5YjdhYzctYzRmMmU4ODEtYTliYjRmMzQtMjlmYTc5MmY=, ActorId: [2:7608491627204903147:2556], ActorState: ExecuteState, LegacyTraceId: 01khtggh9c4tddh915wb1cbjy0, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 29 } message: "At function: KiWriteTable!" end_position { row: 2 column: 29 } severity: 1 issues { position { row: 2 column: 29 } message: "Missing key column in input: Col1 for table: /Root/DataShard" end_position { row: 2 column: 29 } issue_code: 2029 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:22.608085Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7608491605730065023:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:22.608149Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:23.640859Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491631340473416:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:23.640913Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:23.656727Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:23.766688Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491631340473385:2081] 1771490003637463 != 1771490003637466 2026-02-19T08:33:23.781549Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:23.792929Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:23.793015Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:23.796111Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:23.889593Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:23.894322Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:23.894339Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:23.894346Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:23.894429Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:24.372206Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:24.380630Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:24.649871Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:27.374748Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491648520343459:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.374852Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.375086Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491648520343469:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.375110Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.401016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:27.471744Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491648520343564:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.471847Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.472324Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491648520343569:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.472373Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491648520343570:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.472480Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:27.476790Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:27.486779Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491648520343573:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:33:27.552117Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491648520343624:2595] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2026-02-19T08:32:40.927392Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491447100546687:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:40.927459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.090150677 598613 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.090299121 598613 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.250601Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.387651Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.397461Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.398423Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.399868Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.410353Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.417539Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.417853Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.437512Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.437572Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.439452Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.448097Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.450988Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.451042Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.504721Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.520623Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.528909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.544004Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.545103Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:63359 2026-02-19T08:32:41.575680Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.578581Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.583398Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.591043Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.593193Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.593263Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.613671Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.620631Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.620781Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.664986Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2026-02-19T08:32:41.666267Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63359: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63359 } ] 2 ... 76715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggnn3et74n7dtbaehk7x4. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:27.041379Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491648467163689:3012], TxId: 281474976715757, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggnn3et74n7dtbaehk7x4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:33:27.041458Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:413: SelfId: [4:7608491648467163689:3012], TxId: 281474976715757, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggnn3et74n7dtbaehk7x4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2026-02-19T08:33:27.041540Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5363: SelfId: [4:7608491648467163691:3012], TxId: 281474976715757, task: 2. Add data: 234 / 234 2026-02-19T08:33:27.041593Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5331: SelfId: [4:7608491648467163691:3012], TxId: 281474976715757, task: 2. Send data=234, closed=1, bufferActorId=[4:7608491648467163683:2388] 2026-02-19T08:33:27.041614Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [4:7608491648467163689:3012], TxId: 281474976715757, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggnn3et74n7dtbaehk7x4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 234 2026-02-19T08:33:27.041629Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2203: SelfId: [4:7608491648467163689:3012], TxId: 281474976715757, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggnn3et74n7dtbaehk7x4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Waiting finish of sink[0] 2026-02-19T08:33:27.041646Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491648467163688:3011], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggnn3et74n7dtbaehk7x4. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:27.041668Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491648467163688:3011], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . 
TraceId : 01khtggnn3et74n7dtbaehk7x4. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:27.041858Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715757;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:27.042065Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3024: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Create new TableWriteActor for table `Root/yq/connections` ([72057594046644480:56:1]). lockId=281474976715751. ActorId=[4:7608491648467163692:2388] 2026-02-19T08:33:27.042122Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:510: Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]Open: token=0 2026-02-19T08:33:27.042342Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:518: SelfId: [4:7608491648467163692:2388], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]Write: token=0 2026-02-19T08:33:27.042445Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:525: SelfId: [4:7608491648467163692:2388], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]Close: token=0 2026-02-19T08:33:27.042486Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5192: SelfId: [4:7608491648467163691:3012], TxId: 281474976715757, task: 2. TKqpForwardWriteActor receive EvBufferWriteResult from [4:7608491648467163683:2388] 2026-02-19T08:33:27.042498Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5233: SelfId: [4:7608491648467163691:3012], TxId: 281474976715757, task: 2. Finished 2026-02-19T08:33:27.042515Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491648467163689:3012], TxId: 281474976715757, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggnn3et74n7dtbaehk7x4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:33:27.042531Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491648467163689:3012], TxId: 281474976715757, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggnn3et74n7dtbaehk7x4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZWVlYjE4ZDMtMjc5ODIxNmItYzY2YzQ1ZGItNjhhZDg1NmM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:27.042620Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715757;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:27.042825Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3628: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Start prepare for distributed commit 2026-02-19T08:33:27.042840Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1091: SelfId: [4:7608491648467163692:2388], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]SetPrepare; txId=281474976715757 2026-02-19T08:33:27.042853Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3551: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Flush data 2026-02-19T08:33:27.042984Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1265: SelfId: [4:7608491648467163692:2388], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]Send EvWrite to ShardID=72075186224037895, isPrepare=1, isImmediateCommit=0, TxId=281474976715757, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715751 DataShard: 72075186224037895 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 56, Size=324, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1, BufferMemory=324 2026-02-19T08:33:27.043122Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3765: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Send EvWrite (external) to ShardID=72075186224037892, isPrepare=1, isRollback=0, TxId=281474976715757, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715751 DataShard: 72075186224037892 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 50, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2026-02-19T08:33:27.043712Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4295: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Recv EvWriteResult (external) from ShardID=72075186224037892, Status=STATUS_PREPARED, TxId=281474976715757, Locks= , Cookie=0 2026-02-19T08:33:27.043749Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4614: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Got prepared result TxId=281474976715757, TabletId=72075186224037892, Cookie=0 2026-02-19T08:33:27.043768Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3551: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Flush data 2026-02-19T08:33:27.043808Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:769: SelfId: [4:7608491648467163692:2388], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]Recv EvWriteResult from ShardID=72075186224037895, Status=STATUS_PREPARED, TxId=281474976715757, Locks= , Cookie=1 2026-02-19T08:33:27.043848Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3676: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Start distributed commit with TxId=281474976715757 2026-02-19T08:33:27.043862Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1099: SelfId: [4:7608491648467163692:2388], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]SetDistributedCommit; txId=281474976715757 2026-02-19T08:33:27.043888Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3864: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2026-02-19T08:33:27.044362Z node 4 :KQP_COMPUTE DEBUG: 
kqp_write_actor.cpp:3941: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Got transaction status, status: 16 2026-02-19T08:33:27.044541Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3941: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Got transaction status, status: 17 2026-02-19T08:33:27.050866Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4328: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Recv EvWriteResult (external) from ShardID=72075186224037892, Status=STATUS_COMPLETED, TxId=281474976715757, Locks= , Cookie=0 2026-02-19T08:33:27.050901Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4643: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Got completed result TxId=281474976715757, TabletId=72075186224037892, Cookie=0, Locks= 2026-02-19T08:33:27.050958Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:769: SelfId: [4:7608491648467163692:2388], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]Recv EvWriteResult from ShardID=72075186224037895, Status=STATUS_COMPLETED, TxId=281474976715757, Locks= , Cookie=0 2026-02-19T08:33:27.050975Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1049: SelfId: [4:7608491648467163692:2388], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7608491566862781051:2388]Got completed result TxId=281474976715757, TabletId=72075186224037895, Cookie=0, Mode=2, Locks= 2026-02-19T08:33:27.051002Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4680: SelfId: [4:7608491648467163683:2388], SessionActorId: [4:7608491566862781051:2388], Committed TxId=281474976715757 2026-02-19T08:33:27.377256Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: Client is stopped >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex >> KqpQueryService::ExecStatsAst [GOOD] >> KqpQueryService::DmlNoTx >> Yq_1::CreateQuery_Without_Connection [GOOD] >> KqpQueryServiceScripts::ExecuteScript >> KqpQueryService::Write [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] >> KqpQueryServiceScripts::TestTruncatedBySize >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> KqpQueryService::CTASWithoutPerStatement >> KqpQueryService::ExecuteQueryWithWorkloadManager >> KqpQueryService::ReadManyRangesAndPoints [GOOD] >> ScriptExecutionsTest::BackgroundOperationFinalization [GOOD] >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart >> KqpQueryServiceScripts::TestFetchMoreThanLimit [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2026-02-19T08:32:40.890706Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491444121049338:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:40.890949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.044328795 598573 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.044439096 598573 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.285141Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.289292Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.289368Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.289486Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.315947Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.316049Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.316108Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.316214Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.316278Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.318251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.325668Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.336986Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.337069Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.337428Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3573 2026-02-19T08:32:41.350665Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.357792Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.374225Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.392143Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.392815Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.410090Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.419636Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.422519Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.422666Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.423801Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.423997Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.450401Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.450480Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.454029Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.458895Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.526277Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3573: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3573 } ] 2026-02-19T08:32:41.526387Z node 1 :YQ_CONTROL_PLANE_STORAG ... Id : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:33:29.414184Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491658110876359 RawX2: 4503603922340710 } } DstEndpoint { ActorId { RawX1: 7608491658110876360 RawX2: 4503603922340711 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } 2026-02-19T08:33:29.414196Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-02-19T08:33:29.414206Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-02-19T08:33:29.414239Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:29.414248Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:29.414267Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:29.414278Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-02-19T08:33:29.414291Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. 
returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-02-19T08:33:29.414303Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:29.414311Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:29.414702Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:981: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. Recv TEvReadResult from ShardID=72075186224037900, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-02-19T08:33:29.414725Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1086: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. Taken 0 locks 2026-02-19T08:33:29.414733Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1101: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. new data for read #0 seqno = 1 finished = 1 2026-02-19T08:33:29.414748Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-02-19T08:33:29.414763Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:29.414775Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-02-19T08:33:29.414788Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1259: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. enter pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:29.414809Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1306: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. 
exit pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-02-19T08:33:29.414823Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1368: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. returned 1 rows; processed 1 rows 2026-02-19T08:33:29.414851Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1408: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. dropping batch for read #0 2026-02-19T08:33:29.414858Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. effective maxinflight 1024 sorted 0 2026-02-19T08:33:29.414867Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-02-19T08:33:29.414878Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715735, task: 1, CA Id [1:7608491658110876359:2918]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-02-19T08:33:29.414966Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:29.414982Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7608491658110876360:2919], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggra215chxkdmkabe4zfj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:29.415060Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7608491658110876360:2919], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggra215chxkdmkabe4zfj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:29.415105Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7608491658110876360:2919], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggra215chxkdmkabe4zfj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:29.415118Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [1:7608491658110876360:2919], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggra215chxkdmkabe4zfj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:29.415214Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715735;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:29.415327Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:29.415342Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [1:7608491658110876359:2918], TxId: 281474976715735, task: 1. Ctx: { TraceId : 01khtggra215chxkdmkabe4zfj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OTA2MWM4MGUtYzkyZTA3ZjEtZmFmODQwOGYtNDE5ZmQzYmE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:29.415458Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715735;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; [good] Yq_1::CreateQuery_Without_Connection >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2026-02-19T08:32:40.918403Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491444664727726:2087];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:40.920616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.056751494 598593 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.056947864 598593 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.238609Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.239485Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.239587Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.254453Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.254530Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.340920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.343906Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.357764Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24424 2026-02-19T08:32:41.370567Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.387473Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.388394Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.389061Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.391059Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.422657Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.435624Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.440881Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.441538Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.462458Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.462667Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.477975Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.478042Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.483914Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.496273Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.499206Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.503716Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.511057Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.520883Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.534665Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.553844Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ] 2026-02-19T08:32:41.556987Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24424 } ... //session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. All outputs have been finished. Consider finished 2026-02-19T08:33:31.375227Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:31.375261Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:33:31.375275Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. enter getasyncinputdata results size 0, freeSpace 8388608 2026-02-19T08:33:31.375290Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2026-02-19T08:33:31.375303Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. All outputs have been finished. Consider finished 2026-02-19T08:33:31.375313Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:31.375327Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:534: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. Received TEvResolveKeySetResult update for table 'Root/yq/idempotency_keys' 2026-02-19T08:33:31.375383Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:642: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. Processing resolved ShardId# 72075186224037896, partition range: [(String : yandexcloud://Execute_folder_id, String : idempotency_key) ; ()), i: 0, state ranges: 0, points: 1 2026-02-19T08:33:31.375400Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:680: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. 
Add point to new shardId: 72075186224037896 2026-02-19T08:33:31.375477Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:729: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. Pending shards States: TShardState{ TabletId: 72075186224037896, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-02-19T08:33:31.375489Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. effective maxinflight 1024 sorted 0 2026-02-19T08:33:31.375509Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. BEFORE: 1.0 2026-02-19T08:33:31.375542Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:902: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. Send EvRead to shardId: 72075186224037896, tablePath: Root/yq/idempotency_keys, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1771490011403), lockTxId = 281474976715757, lockNodeId = 4 2026-02-19T08:33:31.375564Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:482: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. AFTER: 0.1 2026-02-19T08:33:31.375573Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-02-19T08:33:31.376583Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:981: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. Recv TEvReadResult from ShardID=72075186224037896, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976715757 DataShard: 72075186224037896 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 53, BrokenTxLocks= 2026-02-19T08:33:31.376606Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1086: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. Taken 1 locks 2026-02-19T08:33:31.376616Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1101: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. new data for read #0 seqno = 1 finished = 1 2026-02-19T08:33:31.376636Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2026-02-19T08:33:31.376655Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:33:31.376669Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2026-02-19T08:33:31.376683Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1259: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. enter pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:31.376704Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1306: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. exit pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 1 freeSpace: 8388586 2026-02-19T08:33:31.376723Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1368: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. returned 1 rows; processed 1 rows 2026-02-19T08:33:31.376761Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1408: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. dropping batch for read #0 2026-02-19T08:33:31.376771Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. effective maxinflight 1024 sorted 0 2026-02-19T08:33:31.376781Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-02-19T08:33:31.376793Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715757, task: 1, CA Id [4:7608491665321707531:2422]. returned async data processed rows 1 left freeSpace 8388586 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-02-19T08:33:31.376879Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:31.376933Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-02-19T08:33:31.376947Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491665321707531:2422], TxId: 281474976715757, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggt76fvfrww4jnhs1jn9p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODUwYTY1YzUtYjI4NzA1ZmUtYzM4MzczYjQtMjNlZGYyZg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:31.377102Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715757;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:31.387009Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:769: SelfId: [4:7608491665321707518:2394], Table: `Root/yq/quotas` ([72057594046644480:54:1]), SessionActorId: [4:7608491570832422920:2394]Recv EvWriteResult from ShardID=72075186224037894, Status=STATUS_COMPLETED, TxId=13, Locks= , Cookie=1 2026-02-19T08:33:31.387060Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1049: SelfId: [4:7608491665321707518:2394], Table: `Root/yq/quotas` ([72057594046644480:54:1]), SessionActorId: [4:7608491570832422920:2394]Got completed result TxId=13, TabletId=72075186224037894, Cookie=1, Mode=3, Locks= 2026-02-19T08:33:31.387112Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4680: SelfId: [4:7608491665321707508:2394], SessionActorId: [4:7608491570832422920:2394], Committed TxId=0 2026-02-19T08:33:31.388182Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:769: SelfId: [4:7608491665321707530:2353], Table: `Root/yq/quotas` ([72057594046644480:54:1]), SessionActorId: [4:7608491570832422688:2353]Recv EvWriteResult from ShardID=72075186224037894, Status=STATUS_COMPLETED, TxId=14, Locks= , Cookie=1 2026-02-19T08:33:31.388200Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1049: SelfId: [4:7608491665321707530:2353], Table: `Root/yq/quotas` ([72057594046644480:54:1]), SessionActorId: [4:7608491570832422688:2353]Got completed result TxId=14, TabletId=72075186224037894, Cookie=1, Mode=3, Locks= 2026-02-19T08:33:31.388237Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4680: SelfId: [4:7608491665321707520:2353], SessionActorId: [4:7608491570832422688:2353], Committed TxId=0 >> Yq_1::DeleteQuery [GOOD] >> KqpDocumentApi::RestrictDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyRangesAndPoints [GOOD] Test command err: 2026-02-19T08:33:12.654433Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491585006844956:2179];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:12.654499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:13.028300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:13.028428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:13.054511Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:13.081880Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491585006844815:2081] 1771489992617314 != 1771489992617317 2026-02-19T08:33:13.085397Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:13.090679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:13.246338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:13.246359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will 
try to initialize from file: (empty maybe) 2026-02-19T08:33:13.246365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:13.246455Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:13.350641Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:13.659401Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:13.684053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:13.690399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:15.841194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491597891747621:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.841963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491597891747612:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.842118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.843124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491597891747631:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.843208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.844848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:15.855090Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491597891747626:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:33:15.962704Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491597891747679:2549] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:16.262050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.433759Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491604388606579:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:17.434355Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:17.452043Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:17.514447Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:17.516920Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491604388606529:2081] 1771489997414393 != 1771489997414396 2026-02-19T08:33:17.527929Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:17.528026Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:17.563709Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:17.685300Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:17.712256Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:17.712285Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:17.712292Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:17.712381Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:18.229533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:18.237381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:18.444603Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:20.694780Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491617273509306:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.694864Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.695113Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491617273509316:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.695160Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.746466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:20.924756Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491617273509857:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.924853Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.925259Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491617273509862:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.925299Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491617273509863:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:20.925382Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Servi ... 896Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:24.154410Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:26.611131Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491641414488001:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.611266Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.611684Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491641414488011:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.611788Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.639516Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:26.729339Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491641414488222:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.729425Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.729699Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491641414488227:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.729757Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491641414488228:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.729851Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:26.733812Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:26.745295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-02-19T08:33:26.745538Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491641414488231:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:33:26.803340Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491641414488282:2667] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:28.293797Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491650861162426:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:28.293857Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:28.350893Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:28.468438Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:28.468518Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:28.472317Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608491650861162396:2081] 1771490008293013 != 1771490008293016 2026-02-19T08:33:28.476959Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:28.507208Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:28.588676Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:28.722444Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:28.722471Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:28.722478Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:28.722573Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:29.321846Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:29.353793Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:29.382459Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:32.236757Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [4:7608491668041032471:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.236847Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.237096Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491668041032480:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.237139Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.265415Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.470991Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491668041032915:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.471151Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.471487Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491668041032920:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.471502Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491668041032921:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.471574Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.475287Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:32.485826Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491668041032924:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:33:32.581438Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491668041032975:2814] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> KqpQueryService::CreateAndAlterTopic [GOOD] >> KqpQueryService::ExecuteCollectMeta [GOOD] >> KqpQueryService::ExecuteQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: 2026-02-19T08:33:06.947720Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491557584888411:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:06.950538Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:07.020654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:07.337863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:07.351185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:07.351285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:07.403447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:07.429767Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491557584888275:2081] 1771489986910574 != 1771489986910577 2026-02-19T08:33:07.439243Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:07.565425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:07.620563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:07.620609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:07.620625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:07.620733Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:07.977845Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:08.236076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:08.253271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:08.308174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:08.476893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:08.642290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:08.729951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:10.610813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491574764759338:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:10.610944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:10.614928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491574764759348:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:10.615025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.020330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.058258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.103702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.165895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.237076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.273078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.317351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.363669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.451725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491579059727523:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.451793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.452361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491579059727529:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.452422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491579059727528:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.452454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.456788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:11.473549Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491579059727532:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:11.548652Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491579059727583:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:11.947674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491557584888411:2158];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:11.947743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... 640760:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:28.131699Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:28.169842Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:28.274523Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:28.274622Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:28.305018Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:28.316848Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:28.370750Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:28.434286Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:28.434310Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:28.434318Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:28.434397Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:28.968704Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:28.976828Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:28.986705Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:29.054885Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:29.197150Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:29.302755Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:29.390102Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.250878Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491670566511768:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.250985Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.256025Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491670566511778:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.256123Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.328465Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.371208Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.408144Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.442310Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.477355Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.514180Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.552646Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.600041Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.688333Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491670566512649:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.688413Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.688456Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491670566512654:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.688794Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491670566512656:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.688862Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.691433Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:32.700285Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491670566512657:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:32.788574Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491670566512709:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:33.129809Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491653386640760:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:33.129889Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:34.496268Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 >> KqpQueryService::ExecuteQueryExplicitTxTLI >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged-UseStreamIndex [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> KqpQueryService::MaterializeTxResults [GOOD] >> KqpQueryService::LargeUpsert+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2026-02-19T08:32:40.922135Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491445218255386:2086];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:40.923747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.100135948 598566 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.100252550 598566 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.399289Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.399637Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.483681Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.484238Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.484318Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.484378Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.488354Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.494228Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.569024Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.569114Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.569159Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.569228Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.569341Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.569433Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.569806Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25289 2026-02-19T08:32:41.569915Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.579699Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.599844Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.629043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.634136Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.662539Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.662785Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.663938Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.668203Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.747720Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.751084Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.764698Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.764772Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.765622Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ] 2026-02-19T08:32:41.767815Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25289: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25289 } ... ution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:33.470184Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:33:33.470286Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491672595330291 RawX2: 4503616807242738 } } DstEndpoint { ActorId { RawX1: 7608491672595330292 RawX2: 4503616807242739 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491672595330292 RawX2: 4503616807242739 } } DstEndpoint { ActorId { RawX1: 7608491672595330287 RawX2: 4503616807242269 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-02-19T08:33:33.470302Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:33.470310Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:33.470324Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:33.470338Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:33.470346Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:33.470378Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:981: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-02-19T08:33:33.470392Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1086: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. Taken 0 locks 2026-02-19T08:33:33.470404Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1101: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. new data for read #0 seqno = 1 finished = 1 2026-02-19T08:33:33.470424Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491672595330291:3058], TxId: 281474976715763, task: 1. Ctx: { TraceId : 01khtggvw96vth8pfgn9x9qc9q. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-02-19T08:33:33.470440Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491672595330291:3058], TxId: 281474976715763, task: 1. Ctx: { TraceId : 01khtggvw96vth8pfgn9x9qc9q. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:33.470453Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-02-19T08:33:33.470468Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1259: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:33.470482Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1306: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:33.470495Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1368: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. returned 0 rows; processed 0 rows 2026-02-19T08:33:33.470526Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1408: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. dropping batch for read #0 2026-02-19T08:33:33.470537Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. 
effective maxinflight 1024 sorted 0 2026-02-19T08:33:33.470549Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-02-19T08:33:33.470565Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715763, task: 1, CA Id [4:7608491672595330291:3058]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-02-19T08:33:33.470600Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491672595330291:3058], TxId: 281474976715763, task: 1. Ctx: { TraceId : 01khtggvw96vth8pfgn9x9qc9q. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:33.470612Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:33.470660Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:33.470685Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491672595330291:3058], TxId: 281474976715763, task: 1. Ctx: { TraceId : 01khtggvw96vth8pfgn9x9qc9q. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:33.470700Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491672595330291:3058], TxId: 281474976715763, task: 1. Ctx: { TraceId : 01khtggvw96vth8pfgn9x9qc9q. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:33.470740Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 271646922 2026-02-19T08:33:33.470771Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491672595330292:3059], TxId: 281474976715763, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggvw96vth8pfgn9x9qc9q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NGRjYWM1NjItMTljMGVkMDQtMjBjYWRlOTktNmQ0ZTBlM2I=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:33.470886Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715763;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:33.470916Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715763;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:33.480361Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:791: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqudkqrbsi3ff8c4b7m" } ERROR: {
: Error: (NKikimr::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:673: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] Test command err: 2026-02-19T08:33:15.812098Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491595511481360:2259];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:15.812591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:15.849759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:16.080452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:16.080572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:16.083643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:16.125221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:16.176205Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:16.177289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491595511481130:2081] 1771489995801783 != 1771489995801786 2026-02-19T08:33:16.238550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:16.238569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:16.238577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:16.238665Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:16.372011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:16.682998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:16.812723Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:18.829481Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491608396383918:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:18.829576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:18.829956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491608396383930:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:18.829993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491608396383931:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:18.830096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:18.833074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:18.843012Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491608396383934:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:33:18.960620Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491608396383985:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:19.209038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:19.493911Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-02-19T08:33:19.517668Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491612691351560:2365], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:19.518188Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=NjdkNzNiNGUtMmEzMTlmNzktYjc2OGEyM2UtNjA2OGNmOWE=, ActorId: [1:7608491608396383887:2324], ActorState: ExecuteState, LegacyTraceId: 01khtggem3207c9p63dh94wg1a, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 17 } message: "At function: KiReadTable!" end_position { row: 3 column: 17 } severity: 1 issues { position { row: 3 column: 17 } message: "Cannot find table \'db.[/Root/test/Temp]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:19.542865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:19.551317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:19.565304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:19.595377Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491612691351626:2375], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:33:19.597019Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=ZDQ0MDRkYTAtZDNhNmQzOWMtN2Q1YmJiZDItNzJiZjU1Mjc=, ActorId: [1:7608491612691351600:2371], ActorState: ExecuteState, LegacyTraceId: 01khtggepqarw70asbq9cry24x, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 17 } message: "At function: KiReadTable!" end_position { row: 3 column: 17 } severity: 1 issues { position { row: 3 column: 17 } message: "Cannot find table \'db.[/Root/test/Temp]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:20.407271Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491618602848382:2068];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:20.407322Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:20.426660Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:20.528762Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:20.544405Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491618602848345:2081] 1771490000406804 != 1771490000406807 2026-02-19T08:33:20.566327Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:20.581549Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:20.581641Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:20.586508Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:20.712548Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:20.722390Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or ... 
esult=not_found; 2026-02-19T08:33:32.207751Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.207768Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.227471Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.227523Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.227539Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.228309Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.228354Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.228369Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.236445Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.236445Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.236495Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.236498Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.236514Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.236516Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.245951Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.246006Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.246023Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.249569Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.249622Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.249639Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.255641Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.255693Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.255710Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.261149Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.261200Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.261218Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.263803Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.263852Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.263867Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.269580Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.269629Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.269646Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.272244Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.272291Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.272308Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-02-19T08:33:32.310069Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491669374002711:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.310174Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.310542Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491669374002716:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.310601Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491669374002719:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.310686Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.315015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:32.328484Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491669374002724:2628], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-02-19T08:33:32.392521Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491669374002775:3217] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:32.438317Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491647899164011:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:32.438368Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] >> KqpQueryService::TableSink_HtapInteractive-withOltpSink >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateAndAlterTopic [GOOD] Test command err: 2026-02-19T08:32:55.573437Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491509354814423:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.577631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:55.937850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:55.966337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:55.966450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.068991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.083237Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.105834Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491509354814374:2081] 1771489975561991 != 1771489975561994 2026-02-19T08:32:56.174820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.322750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.322773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.322782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.322870Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.581912Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.115500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.181422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.313044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.458478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.533456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.043021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526534685435:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.043133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.047029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526534685445:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.047095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.626701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.655815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.698601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.726819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.781104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.819838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.861629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.916960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.997138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526534686315:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.997197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.997633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526534686320:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.997689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526534686321:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.997831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.001592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:00.014336Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491526534686324:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:00.095960Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491530829653673:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.571956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491509354814423:2079];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.572016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:01.724288Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1336: Reply process error for request 271646720, status: BAD_SESSION, issues: {
: Error: Session not found: ydb://session/3?node_id=1&id=YzMwODczZjktM2NjOTM4YWQtM2VmNDA5Yy1kYmJmMDM3OQ== } 2026-02-19T08:33:04.029185Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelive ... 976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.360388Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.398019Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.431631Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.473277Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.519967Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.576511Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.668248Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491667495032868:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.668363Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.668645Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491667495032873:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.668710Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491667495032874:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.668761Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.673285Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:32.685754Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491667495032877:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:32.725166Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491646020193661:2070];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:32.725278Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:32.755057Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491667495032930:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:34.950442Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:33:35.064470Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:33:35.187894Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491680379936021:2594], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:3:78: Error: Executing ALTER TOPIC
: Error: path 'Root/NoSuchTopic' does not exist or you do not have access rights, code: 500018 2026-02-19T08:33:35.188574Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=MWRhM2ZlMGItZjlkZTBmYTQtNTk2YWZmOTUtZGZkYjMxMTY=, ActorId: [4:7608491676084967846:2541], ActorState: ExecuteState, LegacyTraceId: 01khtggxxnenxjmex1by3e6svd, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 78 } message: "Executing ALTER TOPIC" end_position { row: 3 column: 78 } severity: 1 issues { message: "path \'Root/NoSuchTopic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } } tx_id# trace_id# Query failed, status: GENERIC_ERROR:
: Error: Execution, code: 1060
:3:78: Error: Executing ALTER TOPIC
: Error: path 'Root/NoSuchTopic' does not exist or you do not have access rights, code: 500018 Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1771490012726, tx_id: 281474976710670 } } Scheme entry: { name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1771490008064, tx_id: 281474976715657 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490009520, tx_id: 281474976710661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490009177, tx_id: 281474976710659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012509, tx_id: 281474976710666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012551, tx_id: 281474976710667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012362, tx_id: 281474976710662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012397, tx_id: 281474976710663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012432, tx_id: 281474976710664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490009296, tx_id: 281474976710660 } } Scheme entry: { name: ReorderKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012607, tx_id: 281474976710668 } } Scheme entry: { name: ReorderOptionalKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012684, tx_id: 281474976710669 } } Scheme entry: { name: TempTopic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1771490014896, tx_id: 281474976710673 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012467, tx_id: 281474976710665 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490008974, tx_id: 281474976710658 } } Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1771490012726, tx_id: 281474976710670 } } Scheme entry: { name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1771490008064, tx_id: 281474976715657 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490009520, tx_id: 281474976710661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490009177, tx_id: 281474976710659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012509, tx_id: 281474976710666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012551, tx_id: 281474976710667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012362, tx_id: 281474976710662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012397, tx_id: 281474976710663 } } Scheme entry: 
{ name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012432, tx_id: 281474976710664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490009296, tx_id: 281474976710660 } } Scheme entry: { name: ReorderKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012607, tx_id: 281474976710668 } } Scheme entry: { name: ReorderOptionalKey, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012684, tx_id: 281474976710669 } } Scheme entry: { name: TempTopic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1771490014896, tx_id: 281474976710673 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490012467, tx_id: 281474976710665 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1771490008974, tx_id: 281474976710658 } } >> KqpQueryService::TableSink_HtapComplex+withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapComplex-withOltpSink >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager [GOOD] >> KqpQueryServiceScripts::ExplainScript >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryService::CTAS_BadKey+IsOlap >> Yq_1::ModifyQuery [GOOD] >> KqpQueryService::DmlNoTx [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] Test command err: 2026-02-19T08:33:12.169326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491584452375537:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:12.169421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:12.441793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:12.473072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:12.473194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:12.556904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:12.561468Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:12.568175Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491584452375398:2081] 1771489992125254 != 1771489992125257 2026-02-19T08:33:12.681582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:12.681607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:12.681614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-02-19T08:33:12.681693Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:12.729790Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:13.130729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:13.138350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:13.194840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.200538Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:13.368435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.516028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.597596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:15.472064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491597337279175:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.472162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.476257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491597337279185:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.476335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.804659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:15.843253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:15.880771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:15.918503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:15.955815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:15.989962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.033560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.110095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.196847Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491601632247352:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.196910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.196914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491601632247357:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.197034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491601632247359:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.197088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.200587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:16.211203Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491601632247361:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:16.283962Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491601632247412:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:17.173959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491584452375537:2159];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:17.174048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:19.034760Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491612024400961:2083];send_to=[0:7307199536658146131:7762515 ... 31:7762515]; 2026-02-19T08:33:30.187536Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:30.188572Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:30.298113Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:30.302102Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608491660156618568:2081] 1771490010113928 != 1771490010113931 2026-02-19T08:33:30.302172Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:30.308887Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:30.308960Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:30.331201Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:30.449985Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:30.450008Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:30.450017Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:30.450094Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:30.480075Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:30.917308Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:30.924724Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:30.937668Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.014691Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.138962Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:31.189664Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.272512Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.677548Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491673041522326:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.677648Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.678077Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491673041522336:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.678139Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.747754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.793833Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.841872Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.882794Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.924699Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.968521Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:34.006772Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:34.060236Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:34.210341Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491677336490503:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.210490Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.210821Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491677336490508:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.210858Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491677336490509:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.210951Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.215123Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:34.229682Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:33:34.229829Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491677336490512:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:34.308641Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491677336490563:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:35.128000Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491660156618802:2266];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:35.128081Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] Test command err: 2026-02-19T08:33:06.735520Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491558763667718:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:06.738345Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:07.157869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:07.196807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:07.196906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:07.270065Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:07.282983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:07.450225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:07.522195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:07.522216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:07.522222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:07.522301Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:07.740581Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:08.149234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:08.155171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:08.238770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:08.399413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:08.579996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:08.657476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:10.532207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491575943538726:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:10.532330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:10.532637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491575943538736:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:10.532707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:10.879497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:10.923737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:10.969847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.028207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.068958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.127430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.180224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.244174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:11.328150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491580238506905:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.328237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.328557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491580238506910:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.328595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491580238506911:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.328696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:11.332574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:11.347923Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491580238506914:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:11.441961Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491580238506965:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:11.715765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491558763667718:2088];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:11.715847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:13.047994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.050032Z node 1 :FLAT_TX_ ... re/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:28.302011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:28.483452Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:28.558505Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:28.566661Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.050152Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491663369845765:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.050248Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.050625Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491663369845775:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.050675Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.121628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.153827Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.186489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.230256Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.272320Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.319725Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.365457Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.429424Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.531743Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491663369846651:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.531843Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.532202Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491663369846656:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.532249Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491663369846657:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.532363Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.536584Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:31.558747Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491663369846660:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:31.661650Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491663369846713:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:32.551655Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491646189974748:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:32.551719Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:33.356036Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.357948Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.359618Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.505830Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:33.512295Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:33.545693Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:33.555300Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:33.555819Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:33.556167Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) >> KqpQueryService::PeriodicTaskInSessionPool >> KqpQueryServiceScripts::Tcl [GOOD] >> KqpQueryService::StreamExecuteQuery >> KqpService::ToDictCache+UseCache >> KqpQueryServiceScripts::ExecuteScriptWithParameters ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2026-02-19T08:32:40.895203Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491446790192732:2152];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:40.897085Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.047665556 598556 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.047788710 598556 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.240284Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.279411Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.282088Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.315759Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.334781Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19748 2026-02-19T08:32:41.334899Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.334954Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.334997Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.335038Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.336477Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.336543Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.336609Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.336638Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.336680Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.336752Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.336779Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.338125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:41.340804Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.370095Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.378182Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.391714Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.394422Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.432133Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.450625Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.456661Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.480221Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.505218Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.505687Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.505745Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 2026-02-19T08:32:41.509046Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19748: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19748 } ] 20 ... YtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:36.596930Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:33:36.597020Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491685120221463 RawX2: 4503616807242772 } } DstEndpoint { ActorId { RawX1: 7608491685120221464 RawX2: 4503616807242773 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491685120221464 RawX2: 4503616807242773 } } DstEndpoint { ActorId { RawX1: 7608491685120221459 RawX2: 4503616807242097 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-02-19T08:33:36.597034Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:36.597042Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:36.597054Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:36.597069Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . 
RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:36.597077Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:36.598370Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:981: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. Recv TEvReadResult from ShardID=72075186224037898, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-02-19T08:33:36.598395Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1086: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. Taken 0 locks 2026-02-19T08:33:36.598407Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1101: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. new data for read #0 seqno = 1 finished = 1 2026-02-19T08:33:36.598430Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491685120221463:3092], TxId: 281474976715761, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-02-19T08:33:36.598450Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491685120221463:3092], TxId: 281474976715761, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:36.598468Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-02-19T08:33:36.598485Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1259: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. enter pack cells method shardId: 72075186224037898 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:36.598514Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1306: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. exit pack cells method shardId: 72075186224037898 processedRows: 0 packed rows: 1 freeSpace: 8387492 2026-02-19T08:33:36.598537Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1368: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. returned 1 rows; processed 1 rows 2026-02-19T08:33:36.598575Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1408: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. 
dropping batch for read #0 2026-02-19T08:33:36.598586Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. effective maxinflight 1024 sorted 0 2026-02-19T08:33:36.598599Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-02-19T08:33:36.598614Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976715761, task: 1, CA Id [4:7608491685120221463:3092]. returned async data processed rows 1 left freeSpace 8387492 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-02-19T08:33:36.598744Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491685120221463:3092], TxId: 281474976715761, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:36.598766Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:36.598882Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:36.598944Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:36.598959Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491685120221464:3093], TxId: 281474976715761, task: 2. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:36.599111Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715761;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:36.599277Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7608491685120221463:3092], TxId: 281474976715761, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:36.599307Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [4:7608491685120221463:3092], TxId: 281474976715761, task: 1. Ctx: { CheckpointId : . TraceId : 01khtggyyf3kap98msm09n2yec. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjA0MmRkZmQtYTYwZWUyOWYtMzNhNWE2Y2UtNzljNjFkZjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:36.599459Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715761;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:36.646975Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:62565: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:62565 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DmlNoTx [GOOD] Test command err: 2026-02-19T08:33:10.779697Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491572819108831:2250];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:10.779776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:11.213813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:11.217470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:11.217595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:11.339883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:11.401834Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491572819108586:2081] 1771489990714738 != 1771489990714741 2026-02-19T08:33:11.420350Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:11.524880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:11.534210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:11.534234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:11.534246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:11.537878Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:11.785996Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:12.016743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:12.023449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:12.085279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:33:12.204701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:12.362540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:12.434708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.369188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491589998979657:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.369752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.370485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491589998979667:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.370555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:14.723198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.754477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.789481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.823124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.860549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.920399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.958962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:15.005012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:15.082581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491594293947836:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.082657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.082972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491594293947841:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.083016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491594293947842:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.083145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:15.087987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:15.102061Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491594293947845:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:15.206914Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491594293947896:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:15.779248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491572819108831:2250];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:15.780544Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:18.181831Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, p ... n_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=ZTc0Zjc2YjMtOTk2N2JkNDEtMjBjYzA5MjktZTE2NTlkOWY=, ActorId: [3:7608491661891648769:2547], ActorState: ExecuteState, LegacyTraceId: 01khtggs836w6mhgfnjr2va60f, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Terminate was called, reason(43):
:1:8: Failed to unwrap empty optional\n" severity: 1 } trace_id# 2026-02-19T08:33:31.609472Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491665068241207:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:31.614815Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:31.629554Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:31.762670Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:31.762765Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:31.798356Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:31.810506Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:31.860371Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:31.934339Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:31.934361Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:31.934370Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:31.934460Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:32.487701Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:32.492811Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:32.502020Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.618455Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.626368Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:32.784856Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:32.892895Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.599285Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491682248112224:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.599401Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.600622Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491682248112234:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.600681Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.668506Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.710004Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.755326Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.798043Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.853944Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.891511Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.942823Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.996747Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:36.140117Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491686543080398:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.140238Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.140610Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491686543080403:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.140663Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491686543080404:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.140805Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.144586Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:36.156372Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491686543080407:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:36.251813Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491686543080458:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:36.607419Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491665068241207:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:36.607488Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNamedExpressions::NamedExpressionRandomUpsertReturning+UseSink+UseDataQuery [GOOD] >> KqpNamedExpressions::NamedExpressionRandomSelect+UseSink >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::FlowControllOnHugeLiteralAsTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::Tcl [GOOD] Test command err: 2026-02-19T08:32:59.689156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491527681056829:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:59.691041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:59.925132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:59.925245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:59.962385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:59.990248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:59.990827Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:59.991874Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491527681056784:2081] 1771489979683116 != 1771489979683119 2026-02-19T08:33:00.107046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:00.107064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:00.107070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:00.107136Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:00.180317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-02-19T08:33:00.475875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:00.482981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:00.552835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:00.684144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:00.693532Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:00.844592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:00.911928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:02.892671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491540565960547:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:02.892774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:02.893134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491540565960556:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:02.893176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:03.246518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:03.290324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:03.323845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:03.370523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:03.408867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:03.492369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:03.568507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:03.638001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:03.765758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491544860928730:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:03.765838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:03.766165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491544860928735:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:03.766233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491544860928736:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:03.766349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:03.774522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:03.795214Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491544860928739:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:03.883588Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491544860928791:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:04.684482Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491527681056829:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:04.684534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:06.623140Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491557981200397:2190];send_to=[0:7307199536658146131:7762515 ... peration_create_resource_pool.cpp:179) 2026-02-19T08:33:32.896061Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491667816144331:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:32.978625Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491667816144382:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:33.151274Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491650636272551:2189];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:33.151343Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:34.923599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:34.934016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:34.936528Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.049502Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:35.075296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:35.088120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:35.096023Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:35.096525Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:35.097020Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:35.352495Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608491680701046887:2573], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2026-02-19T08:33:35.355493Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=ZDI4NDM1ODEtZjc2MDk3ZWItN2U1N2ZiZmUtMTM2ZWExM2Q=, ActorId: [3:7608491680701046877:2568], ActorState: ExecuteState, LegacyTraceId: 01khtggxp271je8x60s26fr5zh, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 13 } message: "At function: Commit!" end_position { row: 3 column: 13 } severity: 1 issues { position { row: 3 column: 13 } message: "COMMIT not supported inside YDB query" end_position { row: 3 column: 13 } issue_code: 2008 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:35.356066Z node 3 :KQP_EXECUTER WARN: kqp_run_script_actor.cpp:700: TRunScriptActor [3:7608491680701046802:4129]. Ctx: { TraceId: 01khtggxp271je8x60s26fr5zh, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDI4NDM1ODEtZjc2MDk3ZWItN2U1N2ZiZmUtMTM2ZWExM2Q=, PoolId: default, CustomerSuppliedId: 01khtggxp271je8x60s26fr5zh, CurrentExecutionId: 2ccf0184-b2435c62-904e472b-22c8159b, RunScriptActorId: [3:7608491680701046802:4129], IsStreamingQuery: 0}. LeaseGeneration: 1. RunState: 1. Script query failed from [3:7608491650636272597:2226] GENERIC_ERROR, Issues: {
: Error: Type annotation, code: 1030 subissue: {
:3:13: Error: At function: Commit! subissue: {
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 } } } 2026-02-19T08:33:35.955754Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2026-02-19T08:33:35.955936Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037927 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-02-19T08:33:35.956993Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037927 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-02-19T08:33:35.958101Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:981: SelfId: [3:7608491680701046949:2581], Table: `/Root/.metadata/script_executions` ([72057594046644480:54:1]), SessionActorId: [3:7608491680701046903:2581]Got LOCKS BROKEN for table `/Root/.metadata/script_executions`. ShardID=72075186224037927, Sink=[3:7608491680701046949:2581].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-02-19T08:33:35.958214Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:7608491680701046932:2581], SessionActorId: [3:7608491680701046903:2581], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/.metadata/script_executions`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7608491680701046903:2581]. 2026-02-19T08:33:35.958382Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=M2U4MTJjZGMtOWIzYjAwZTItMjQ4YmI2YzQtNzMzNjM2Y2E=, ActorId: [3:7608491680701046903:2581], ActorState: ExecuteState, LegacyTraceId: 01khtggy42dbbgkfmz4whmqdvp, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7608491680701046933:2581] from: [3:7608491680701046932:2581] trace_id# 2026-02-19T08:33:35.958486Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:7608491680701046933:2581] TxId: 281474976710684. Ctx: { TraceId: 01khtggy42dbbgkfmz4whmqdvp, Database: /Root, SessionId: ydb://session/3?node_id=3&id=M2U4MTJjZGMtOWIzYjAwZTItMjQ4YmI2YzQtNzMzNjM2Y2E=, PoolId: , IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# [ {
: Error: Transaction locks invalidated. Table: `/Root/.metadata/script_executions`., code: 2001 } {
: Error: Operation is aborting because locks are not valid, code: 2001 } ] trace_id# 2026-02-19T08:33:35.958775Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=M2U4MTJjZGMtOWIzYjAwZTItMjQ4YmI2YzQtNzMzNjM2Y2E=, ActorId: [3:7608491680701046903:2581], ActorState: ExecuteState, LegacyTraceId: 01khtggy42dbbgkfmz4whmqdvp, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/.metadata/script_executions`." issue_code: 2001 severity: 1 }{ message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } trace_id# 2026-02-19T08:33:35.962233Z node 3 :KQP_PROXY WARN: query_actor.cpp:376: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [3:7608491680701046897:4187], ActorId: [3:7608491680701046899:4189], TraceId: ExecutionId: 2ccf0184-b2435c62-904e472b-22c8159b, RequestDatabase: /Root, LeaseGeneration: 1, Finish with ABORTED, Issues: [ {
: Error: Transaction locks invalidated. Table: `/Root/.metadata/script_executions`., code: 2001 } {
: Error: Operation is aborting because locks are not valid, code: 2001 } ], SessionId: ydb://session/3?node_id=3&id=M2U4MTJjZGMtOWIzYjAwZTItMjQ4YmI2YzQtNzMzNjM2Y2E=, TxId: 01khtggyp3578rpmd3j3g13wsr 2026-02-19T08:33:37.209875Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608491689290982161:2840], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2026-02-19T08:33:37.210397Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=NDI4ZGZmMzktNTgyOTQ4M2ItYzk3NzgyMTUtNGU4YTM2Mzk=, ActorId: [3:7608491689290982157:2838], ActorState: ExecuteState, LegacyTraceId: 01khtggzwr40emrzdn0k7rytnx, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 13 } message: "At function: Commit!" end_position { row: 3 column: 13 } severity: 1 issues { position { row: 3 column: 13 } message: "ROLLBACK not supported inside YDB query" end_position { row: 3 column: 13 } issue_code: 2008 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:33:37.210727Z node 3 :KQP_EXECUTER WARN: kqp_run_script_actor.cpp:700: TRunScriptActor [3:7608491689290982134:4398]. Ctx: { TraceId: 01khtggzwr40emrzdn0k7rytnx, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NDI4ZGZmMzktNTgyOTQ4M2ItYzk3NzgyMTUtNGU4YTM2Mzk=, PoolId: default, CustomerSuppliedId: 01khtggzwr40emrzdn0k7rytnx, CurrentExecutionId: 8511c49a-99d46c7f-b9f63f0a-9460f272, RunScriptActorId: [3:7608491689290982134:4398], IsStreamingQuery: 0}. LeaseGeneration: 1. RunState: 1. Script query failed from [3:7608491650636272597:2226] GENERIC_ERROR, Issues: {
: Error: Type annotation, code: 1030 subissue: {
:3:13: Error: At function: Commit! subissue: {
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 } } } >> BasicUsage::CreateTopicWithCustomAdvancedMonitoringSettings [GOOD] >> KqpQueryService::TableSink_OltpLiteralUpsert >> KqpQueryService::Followers [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow >> KqpQueryService::DdlSecret [GOOD] >> KqpQueryService::DdlMixedDml >> KqpQueryService::ExecuteQuery [GOOD] >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken >> KqpQueryService::TableSink_Htap+withOltpSink [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink >> KqpQueryService::CTAS_BadKey+IsOlap [GOOD] >> KqpQueryService::CTAS_BadKey-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::CreateTopicWithCustomAdvancedMonitoringSettings [GOOD] Test command err: 2026-02-19T08:32:28.207827Z :BrokenCredentialsProvider INFO: Random seed for debugging is 1771489948207798 2026-02-19T08:32:28.690375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491395671464554:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:28.690537Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:28.763533Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:32:28.776148Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491392489237526:2092];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:28.776527Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:28.789535Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-02-19T08:32:28.906393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:28.914013Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:29.067391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:29.067510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:29.069037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:29.069100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:29.088099Z node 1 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-02-19T08:32:29.088353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-02-19T08:32:29.100304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:29.147179Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:29.190570Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:29.200366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:29.458482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ynks/0030b2/r3tmp/yandexM52KSA.tmp 2026-02-19T08:32:29.458530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ynks/0030b2/r3tmp/yandexM52KSA.tmp 2026-02-19T08:32:29.458708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ynks/0030b2/r3tmp/yandexM52KSA.tmp 2026-02-19T08:32:29.458814Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:29.634767Z INFO: TTestServer started on Port 6212 GrpcPort 6709 2026-02-19T08:32:29.696131Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:29.784143Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:6709 2026-02-19T08:32:29.805984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:31.978926Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491405374139717:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:31.978957Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491405374139725:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:31.979083Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:31.979382Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491405374139732:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:31.979449Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:31.986285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:32.005121Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7608491405374139731:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-02-19T08:32:32.141449Z node 2 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [2:7608491409669107057:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:32.447338Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [1:7608491412851335075:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:32:32.449392Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=1&id=MzA0OGE0ZjEtZTUyMzQ2YWYtODBiNjc1OGYtNGU0NGJjNw==, ActorId: [1:7608491412851335032:2334], ActorState: ExecuteState, LegacyTraceId: 01khtgf0af1vw48wy8ycvrstjv, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:32:32.451013Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [2:7608491409669107072:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-02-19T08:32:32.451023Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:32:32.451403Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=2&id=MTkyNzVkNjQtZmQxZDZmMTUtODI3ZDM1NzEtM2IwYTVjOWE=, ActorId: [2:7608491405374139715:2306], ActorState: ExecuteState, LegacyTraceId: 01khtgf07812w36txjtx11vf8c, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-02-19T08:32:32.451746Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } EffectivePoolId: "default" } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-02-19T08:32:32.463331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__o ... 
node 10 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1771490019" Version: 2 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1771490019" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1771490019" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 MetricsLevel: 3 MonitoringProjectId: "mon" } MonitoringProjectId: "" 2026-02-19T08:33:39.374407Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:33:39.399706Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-02-19T08:33:39.405626Z node 10 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:39.405656Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.405670Z node 10 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:39.405689Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.405704Z node 10 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:33:39.415771Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:207: new Alter topic request 2026-02-19T08:33:39.417823Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-02-19T08:33:39.427169Z node 9 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1771490019] updating configuration. Deleted partitions []. 
Added partitions [] 2026-02-19T08:33:39.428283Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3668: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-02-19T08:33:39.431601Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1060: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-02-19T08:33:39.437893Z node 10 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:39.437923Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.437937Z node 10 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:39.437958Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.437973Z node 10 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:33:39.442044Z node 10 :PERSQUEUE DEBUG: partition.cpp:1295: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1771490019481, TxId 281474976710676 2026-02-19T08:33:39.442086Z node 10 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:39.442104Z node 10 :PERSQUEUE DEBUG: partition.cpp:2445: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-02-19T08:33:39.442137Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:33:39.442148Z node 10 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:39.442166Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:33:39.442178Z node 10 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:33:39.442473Z node 10 :PERSQUEUE DEBUG: partition.cpp:1439: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1771490019481, TxId 281474976710676 2026-02-19T08:33:39.442506Z node 10 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:39.442521Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:33:39.442536Z node 10 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:39.442559Z node 10 :PERSQUEUE DEBUG: partition.cpp:2509: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-02-19T08:33:39.442730Z node 10 :PERSQUEUE DEBUG: partition.cpp:3984: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976710676 2026-02-19T08:33:39.442754Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:33:39.442770Z node 10 :PERSQUEUE DEBUG: partition.cpp:2371: 
[72075186224037894][Partition][0][StateIdle] Batch completed (1) 2026-02-19T08:33:39.442789Z node 10 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:33:39.443042Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-02-19T08:33:39.449206Z node 10 :PERSQUEUE DEBUG: partition.cpp:2182: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:33:39.449540Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:33:39.449691Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:33:39.449730Z node 10 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:39.449748Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.449762Z node 10 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:39.449780Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.450094Z node 10 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:33:39.450131Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-02-19T08:33:39.450573Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:400: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1771490019" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1771490019" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1771490019" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } MonitoringProjectId: "" 2026-02-19T08:33:39.450617Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:33:39.476701Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-02-19T08:33:39.505687Z node 10 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:39.505741Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.505757Z node 10 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037894][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-02-19T08:33:39.505779Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.505794Z node 10 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037894][Partition][0][StateIdle] Try persist 2026-02-19T08:33:39.538457Z node 10 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:39.538497Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.538516Z node 10 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:39.538541Z node 10 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:39.538559Z node 10 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037892][Partition][0][StateIdle] Try persist 2026-02-19T08:33:39.822922Z node 9 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1270} ActorId: [9:7608491700897179521:2482] TxId: 281474976710677. Ctx: { TraceId: 01khtgh21r9kk4963t4vj6jxrj, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NWZlYmI1ODItODcwYzcxYmEtMjIyZWFkNGItNDdkNmY3NmY=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 trace_id# 2026-02-19T08:33:39.823119Z node 9 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [9:7608491700897179525:2482], TxId: 281474976710677, task: 3. Ctx: { CheckpointId : . TraceId : 01khtgh21r9kk4963t4vj6jxrj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=9&id=NWZlYmI1ODItODcwYzcxYmEtMjIyZWFkNGItNDdkNmY3NmY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [9:7608491700897179521:2482], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpOrder >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityExtendTimeout [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] >> KqpQueryService::TableSink_HtapInteractive-withOltpSink [GOOD] >> KqpQueryService::TableSink_OlapInsert >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityBatchInvalid >> KqpQueryService::TableSink_OltpLiteralUpsert [GOOD] >> KqpQueryService::TableSink_OltpInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] Test command err: 2026-02-19T08:32:21.554793Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491364848107706:2267];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:21.554937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:21.799180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:21.841570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:21.841698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:21.873119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:21.954045Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:21.955116Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491364848107458:2081] 1771489941537840 != 1771489941537843 2026-02-19T08:32:21.969096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:22.117254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:22.117356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:22.117365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:22.117440Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:22.495027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:22.553697Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:22.687552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-02-19T08:32:22.693454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:22.753145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.878826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.921429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.963846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:22.996575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.025761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.057100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.086806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.119137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:23.148864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:24.433966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491377733011011:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.433972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491377733011001:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.434084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.434342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491377733011016:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.434420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:24.437825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:32:24.448435Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491377733011015:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-02-19T08:32:24.515027Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491377733011068:3098] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:32:25.222963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.248778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.273587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.298058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.324385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:25.347205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
s user action and tx pending commits 2026-02-19T08:33:43.442921Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.442936Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:43.542847Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:43.542891Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.542905Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:43.542927Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.542944Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:43.644170Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:43.644215Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.644232Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:43.644254Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.644269Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:43.745833Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:43.745871Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.745886Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:43.745904Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.745919Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:43.846036Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:43.846069Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.846083Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:43.846100Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.846114Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:43.946379Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user 
action and tx events 2026-02-19T08:33:43.946415Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.946434Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:43.946456Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:43.946471Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:44.049836Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:44.049874Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:44.049888Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:44.049907Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:44.049922Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:44.150074Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:44.150121Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:44.150139Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:44.150163Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:44.150190Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:44.238836Z node 7 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 1 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 77 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 
WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 1 WaitingLockingDurationValues: 1 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 WaitingLockingDurationValues: 0 2026-02-19T08:33:44.240378Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:148: Handle TEvRequest topic: 'topic1' requestId: 2026-02-19T08:33:44.240414Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:2718: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-02-19T08:33:44.240526Z node 7 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 6 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-02-19T08:33:44.240776Z node 7 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 6 added 1 blobs, size 71 count 1 last offset 0, current partition end offset: 1 2026-02-19T08:33:44.240808Z node 7 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 6. Send blob request. 2026-02-19T08:33:44.240855Z node 7 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 71 accessed 3 times before, last time 2026-02-19T08:33:41.000000Z 2026-02-19T08:33:44.240893Z node 7 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 6. All 1 blobs are from cache. 2026-02-19T08:33:44.240928Z node 7 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-02-19T08:33:44.241039Z node 7 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 53 from pos 0 cbcount 1 2026-02-19T08:33:44.241136Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:183: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-02-19T08:33:44.242024Z node 7 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-02-19T08:33:44.242394Z node 7 :SQS INFO: http_req.cpp:1237: Request [30b61955-5a3aa350-52be016b-db705364] Request done. 
Api [SqsTopic] Action [ReceiveMessage] User [AKIDEXAMPLE] Database [/Root] Queue [topic1] IP [189a:d98f:4e7c:0:9a:d98f:4e7c:0] Duration [2707ms] Code [200] 2026-02-19T08:33:44.242414Z node 7 :HTTP_PROXY INFO: http_req.cpp:1667: http request [ReceiveMessage] requestId [30b61955-5a3aa350-52be016b-db705364] reply ok 2026-02-19T08:33:44.242705Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:331: (#38,[::1]:37778) <- (200 , 211 bytes) 2026-02-19T08:33:44.242792Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:392: (#38,[::1]:37778) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1771489991240"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} jsonReceived = { "Messages": [ { "Attributes": { "SentTimestamp":"1771489991240" }, "Body":"MessageBody-0", "MD5OfBody":"94a29778a1f1f41bf68142847b2e6106", "MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125", "ReceiptHandle":"CAAQAA==" } ] } 2026-02-19T08:33:44.252588Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:44.252624Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:44.252639Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:44.252661Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:44.252680Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist 2026-02-19T08:33:44.352247Z node 7 :PERSQUEUE DEBUG: partition.cpp:2358: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-02-19T08:33:44.352281Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:44.352293Z node 7 :PERSQUEUE DEBUG: partition.cpp:2366: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:44.352308Z node 7 :PERSQUEUE DEBUG: partition.cpp:2417: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-02-19T08:33:44.352321Z node 7 :PERSQUEUE DEBUG: partition.cpp:2376: [72075186224037907][Partition][0][StateIdle] Try persist >> KqpQueryService::StreamExecuteQuery [GOOD] >> KqpQueryService::StreamExecuteCollectMeta ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] Test command err: 2026-02-19T08:33:13.454073Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491585558919833:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:13.455652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:13.737931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:13.738026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-02-19T08:33:13.781395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:13.807569Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491585558919789:2081] 1771489993451867 != 1771489993451870 2026-02-19T08:33:13.819437Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:13.822675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:13.959698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:14.008804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:14.008821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:14.008827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:14.008906Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:14.459027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:14.466266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:14.469544Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:14.517299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.689310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.868295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.936413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.909319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491598443823556:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.909459Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.909868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491598443823567:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.909947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:17.264966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.313195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.350788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.382381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.409360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.439125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.506476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.553429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:17.644748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491602738791732:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:17.644827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:17.645152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491602738791737:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:17.645197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491602738791738:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:17.645291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:17.648493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:17.663509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-02-19T08:33:17.663715Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491602738791741:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:17.747511Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491602738791792:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:18.453772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491585558919833:2075];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:18.453855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:19.237404Z ... re/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.178918Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.302822Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:35.345792Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.425978Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.038903Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491694697884606:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.038979Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.039426Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491694697884616:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.039471Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.116702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.191303Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.227766Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.267942Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.326336Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.370050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.419830Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.482515Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.585285Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491694697885498:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.585389Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.585643Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491694697885504:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.585686Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491694697885503:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.585760Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.589678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:38.603902Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491694697885507:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:38.691061Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491694697885558:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:39.276338Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491677518013728:2202];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:39.276395Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:40.473024Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.475355Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.477115Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.612615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.621794Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.628477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.632759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.633149Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.633441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) >> KqpQueryService::CTAS_BadKey-IsOlap [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> KqpQueryService::ExecuteRetryQuery >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] >> KqpQueryServiceScripts::ExplainScript [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecution >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapInteractive+withOltpSink >> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback [GOOD] >> KqpQueryService::ExecuteDDLStatusCodeSchemeError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:112:2143] Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:135:2058] recipient: [1:113:2144] Leader for TabletID 72057594046316545 is [1:137:2159] sender: [1:140:2058] recipient: [1:114:2145] 2026-02-19T08:32:28.640952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8101: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-02-19T08:32:28.641049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8129: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.641124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8162: ForcedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10, 
ForcedCompactionPersistBatchSize# 100, ForcedCompactionPersistBatchMaxTime# 0.100000s 2026-02-19T08:32:28.641169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8009: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-02-19T08:32:28.641215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8025: OperationsProcessing config: using default configuration 2026-02-19T08:32:28.641267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-02-19T08:32:28.641304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8031: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-02-19T08:32:28.641333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8279: ConditionalErase configured: ShardsInFlight (for table) 0, BatchSize 100, BatchMaxTime 0.100000s 2026-02-19T08:32:28.641398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8194: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-02-19T08:32:28.641491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-02-19T08:32:28.642432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-02-19T08:32:28.642793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-02-19T08:32:28.783391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8315: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-02-19T08:32:28.783465Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:28.784310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8265: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:180:2058] recipient: [1:15:2062] 2026-02-19T08:32:28.795463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-02-19T08:32:28.795538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-02-19T08:32:28.795663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-02-19T08:32:28.815072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-02-19T08:32:28.815622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-02-19T08:32:28.816352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-02-19T08:32:28.817076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-02-19T08:32:28.821739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.821928Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-02-19T08:32:28.823117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-02-19T08:32:28.823177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-02-19T08:32:28.823470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-02-19T08:32:28.823523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-02-19T08:32:28.823568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-02-19T08:32:28.823714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7270: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-02-19T08:32:29.002126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.003940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.004021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-02-19T08:32:29.004177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken ... 
e TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1157 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-02-19T08:33:47.406717Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2026-02-19T08:33:47.406861Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1157 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-02-19T08:33:47.406993Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1157 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2026-02-19T08:33:47.407441Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6358: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2026-02-19T08:33:47.407531Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2026-02-19T08:33:47.407566Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-02-19T08:33:47.407603Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-02-19T08:33:47.407644Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-02-19T08:33:47.407729Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2026-02-19T08:33:47.408238Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6010: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 741 RawX2: 219043334822 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-02-19T08:33:47.408281Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1843: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2026-02-19T08:33:47.408631Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 
1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 741 RawX2: 219043334822 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-02-19T08:33:47.408704Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-02-19T08:33:47.408801Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1089: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 741 RawX2: 219043334822 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-02-19T08:33:47.408867Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:675: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-02-19T08:33:47.408907Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:712: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-02-19T08:33:47.408946Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:724: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-02-19T08:33:47.408995Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1003:0 ProposedWaitParts -> Done 2026-02-19T08:33:47.416248Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-02-19T08:33:47.416506Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-02-19T08:33:47.416671Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-02-19T08:33:47.416797Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-02-19T08:33:47.416841Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046678944] TDone opId# 1003:0 ProgressState 2026-02-19T08:33:47.416942Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-02-19T08:33:47.416976Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-02-19T08:33:47.417019Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-02-19T08:33:47.417052Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-02-19T08:33:47.417091Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-02-19T08:33:47.417134Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-02-19T08:33:47.417175Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-02-19T08:33:47.417206Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1003:0 2026-02-19T08:33:47.417347Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-02-19T08:33:47.420413Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:248: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-02-19T08:33:47.420461Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:294: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-02-19T08:33:47.420814Z node 51 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-02-19T08:33:47.420895Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:265: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-02-19T08:33:47.420928Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:274: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:858:2829] TestWaitNotification: OK eventTxId 1003 2026-02-19T08:33:47.421356Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-02-19T08:33:47.421555Z node 51 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 236us result status StatusSuccess 2026-02-19T08:33:47.422115Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::BrokenCredentialsProvider >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] Test command err: 2026-02-19T08:32:43.702844Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491460338364490:2232];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:43.702889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:44.023260Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491460338364286:2081] 1771489963688687 != 1771489963688690 2026-02-19T08:32:44.034381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:44.034475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-02-19T08:32:44.036587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:44.126031Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:44.465287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:44.465309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:44.465329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:44.465413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:44.697984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:44.706580Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:44.707194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:46.542688Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.557109Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:474: Subscribed for config changes. 2026-02-19T08:32:46.557164Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:481: Updated table service config. 2026-02-19T08:32:46.557195Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1568: Updated YQL logs priority to current level: 4 2026-02-19T08:32:46.558971Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.558971Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.558984Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Creating table 2026-02-19T08:32:46.558989Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Creating table 2026-02-19T08:32:46.559037Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:46.559056Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:32:46.559178Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. 
SelfId: [1:7608491473223267016:2490] Owner: [1:7608491473223267013:2487]. Describe result: PathErrorUnknown 2026-02-19T08:32:46.559183Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7608491473223267016:2490] Owner: [1:7608491473223267013:2487]. Creating table 2026-02-19T08:32:46.559216Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7608491473223267016:2490] Owner: [1:7608491473223267013:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-02-19T08:32:46.567778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.570132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.571709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:46.577025Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-02-19T08:32:46.577069Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Subscribe on create table tx: 281474976710658 2026-02-19T08:32:46.577137Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-02-19T08:32:46.577157Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Subscribe on create table tx: 281474976710659 2026-02-19T08:32:46.577587Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7608491473223267016:2490] Owner: [1:7608491473223267013:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-02-19T08:32:46.577595Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7608491473223267016:2490] Owner: [1:7608491473223267013:2487]. Subscribe on create table tx: 281474976710660 2026-02-19T08:32:46.582196Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. 
Subscribe on tx: 281474976710658 registered 2026-02-19T08:32:46.582223Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Subscribe on tx: 281474976710659 registered 2026-02-19T08:32:46.582230Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7608491473223267016:2490] Owner: [1:7608491473223267013:2487]. Subscribe on tx: 281474976710660 registered 2026-02-19T08:32:46.717392Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-02-19T08:32:46.773079Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7608491473223267016:2490] Owner: [1:7608491473223267013:2487]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-02-19T08:32:46.773828Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-02-19T08:32:46.775001Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Table already exists, number of columns: 6, has SecurityObject: true 2026-02-19T08:32:46.775051Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Column diff is empty, finishing 2026-02-19T08:32:46.776070Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-02-19T08:32:46.777018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:32:46.778884Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-02-19T08:32:46.778906Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7608491473223267015:2489] Owner: [1:7608491473223267013:2487]. Successful alter request: ExecComplete 2026-02-19T08:32:46.854022Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Table already exists, number of columns: 34, has SecurityObject: true 2026-02-19T08:32:46.854121Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Column diff is empty, finishing 2026-02-19T08:32:46.854215Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. 
SelfId: [1:7608491473223267014:2488] Owner: [1:7608491473223267013:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-02-19T08:32:46.855131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemesh ... SaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, user_group_sids, script_sinks, script_secret_names, retry_state, start_ts, graph_compressed IS NOT NULL AS has_graph FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-02-19T08:33:46.184131Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=OGY5YzkzM2ItY2UzYzk2ODktNjA0YmI4MjMtZDQ5YzE5M2M=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 60, targetId: [4:7608491727756690033:2624] 2026-02-19T08:33:46.184161Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 60 timeout: 300.000000s actor id: [4:7608491727756690035:3121] 2026-02-19T08:33:46.193137Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 60, sender: [4:7608491727756690034:2625], selfId: [4:7608491676217080580:2217], source: [4:7608491727756690033:2624] 2026-02-19T08:33:46.193939Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7608491727756690028:2620], ActorId: [4:7608491727756690029:2621], TraceId: ExecutionId: c6a71893-d0af428-e39455e0-ec79de12, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get operation info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OGY5YzkzM2ItY2UzYzk2ODktNjA0YmI4MjMtZDQ5YzE5M2M=, TxId: 01khtgh8p86jws7gne6822wcc3 2026-02-19T08:33:46.194426Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4273: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7608491727756690028:2620], ActorId: [4:7608491727756690029:2621], TraceId: ExecutionId: c6a71893-d0af428-e39455e0-ec79de12, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get operation info, Do finalization with status UNAVAILABLE, exec status: EXEC_STATUS_ABORTED, finalization status (applicate effect: 0): 1, issues: {
: Error: Lease expired }, retry deadline (wait retry: 0): 2026-02-19T08:33:46.194327Z, lease state: 1 2026-02-19T08:33:46.194867Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7608491727756690028:2620], ActorId: [4:7608491727756690029:2621], TraceId: ExecutionId: c6a71893-d0af428-e39455e0-ec79de12, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, RunDataQuery with SessionId: ydb://session/3?node_id=4&id=OGY5YzkzM2ItY2UzYzk2ODktNjA0YmI4MjMtZDQ5YzE5M2M=, TxId: 01khtgh8p86jws7gne6822wcc3, text: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $stats AS JsonDocument; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; DECLARE $retry_state AS JsonDocument; DECLARE $retry_deadline AS Timestamp; DECLARE $lease_state AS Int32; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, end_ts = CurrentUtcTimestamp(), stats = $stats, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL), retry_state = $retry_state WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2026-02-19T08:33:46.195331Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=OGY5YzkzM2ItY2UzYzk2ODktNjA0YmI4MjMtZDQ5YzE5M2M=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 61, targetId: [4:7608491727756690033:2624] 2026-02-19T08:33:46.195364Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 61 timeout: 300.000000s actor id: [4:7608491727756690052:3126] 2026-02-19T08:33:46.211953Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: Forwarded response to sender actor, requestId: 61, sender: [4:7608491727756690051:2631], selfId: [4:7608491676217080580:2217], source: [4:7608491727756690033:2624] 2026-02-19T08:33:46.212995Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7608491727756690028:2620], ActorId: [4:7608491727756690029:2621], TraceId: ExecutionId: c6a71893-d0af428-e39455e0-ec79de12, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=4&id=OGY5YzkzM2ItY2UzYzk2ODktNjA0YmI4MjMtZDQ5YzE5M2M=, TxId: 2026-02-19T08:33:46.213125Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7608491727756690028:2620], ActorId: [4:7608491727756690029:2621], TraceId: ExecutionId: c6a71893-d0af428-e39455e0-ec79de12, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OGY5YzkzM2ItY2UzYzk2ODktNjA0YmI4MjMtZDQ5YzE5M2M=, TxId: 2026-02-19T08:33:46.213169Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4347: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7608491727756690028:2620], ActorId: [4:7608491727756690029:2621], TraceId: ExecutionId: c6a71893-d0af428-e39455e0-ec79de12, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2026-02-19T08:33:46.213358Z node 4 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [4:7608491727756690027:2619], ActorId: [4:7608491727756690028:2620], TraceId: ExecutionId: c6a71893-d0af428-e39455e0-ec79de12, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [4:7608491727756690029:2621] SUCCESS 2026-02-19T08:33:46.213656Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1476: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7608491723461722614:3072] ActorId: [4:7608491723461722656:3089] Database: /dc-1 ExecutionId: c6a71893-d0af428-e39455e0-ec79de12. Successfully finalized script execution operation, WaitingRetry: 0 2026-02-19T08:33:46.213691Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1823: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7608491723461722614:3072] ActorId: [4:7608491723461722656:3089] Database: /dc-1 ExecutionId: c6a71893-d0af428-e39455e0-ec79de12. Reply success 2026-02-19T08:33:46.213770Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4878: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7608491714871787896:3002] ActorId: [4:7608491723461722614:3072]. Lease check #0 [4:7608491723461722659:3091] successfully completed, OperationsToCheck: 0 2026-02-19T08:33:46.213797Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4890: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7608491714871787896:3002] ActorId: [4:7608491723461722614:3072]. Finish, success: 1, issues: 2026-02-19T08:33:46.213830Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:79: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Refresh successfully completed 2026-02-19T08:33:46.214132Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=4&id=OGY5YzkzM2ItY2UzYzk2ODktNjA0YmI4MjMtZDQ5YzE5M2M=, workerId: [4:7608491727756690033:2624], local sessions count: 1 2026-02-19T08:33:46.366610Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtgh8p01thwt5k01c0vpt3b", Forwarded response to sender actor, requestId: 58, sender: [4:7608491727756690023:2617], selfId: [4:7608491676217080580:2217], source: [4:7608491710576820471:2534] 2026-02-19T08:33:46.383263Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:746: Ctx: { TraceId: 01khtgh8wecz6saxz4dn77j5gy, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=NzhlOGE5ZjgtYzNhYTQ0YmEtYjdhNDg0OTAtNTA3ZDkyNTQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 62, targetId: [4:7608491710576820471:2534] 2026-02-19T08:33:46.383321Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1199: Scheduled timeout timer for requestId: 62 timeout: 300.000000s actor id: [4:7608491727756690087:3138] 2026-02-19T08:33:46.934271Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:106: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do ScheduleRefreshScriptExecutions (WaitRefreshScriptExecutions: 0), next refresh after 1.000000s 2026-02-19T08:33:46.934326Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:121: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Schedule lease check after 0.736706s 2026-02-19T08:33:47.224190Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:932: TraceId: "01khtgh8wecz6saxz4dn77j5gy", Forwarded response to sender actor, requestId: 62, sender: [4:7608491727756690086:2638], selfId: [4:7608491676217080580:2217], source: [4:7608491710576820471:2534] 2026-02-19T08:33:47.240520Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1258: Session closed, sessionId: ydb://session/3?node_id=4&id=NzhlOGE5ZjgtYzNhYTQ0YmEtYjdhNDg0OTAtNTA3ZDkyNTQ=, workerId: [4:7608491710576820471:2534], local sessions count: 0 >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithParameters [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] >> KqpQueryService::ExecuteQueryScalar |95.6%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::TableSink_OlapInsert [GOOD] >> KqpQueryService::TableSink_OlapDelete >> KqpQueryService::DdlMixedDml [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery |95.6%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::TableSink_OltpInsert [GOOD] >> KqpQueryService::TableSink_OltpInteractive ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlers [GOOD] Test command err: 2026-02-19T08:33:49.991272Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.991340Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.991375Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.991802Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2026-02-19T08:33:49.991841Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.991865Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.992837Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007959s 2026-02-19T08:33:49.993292Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:49.993685Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:33:49.993820Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.995784Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.995802Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.995817Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.996154Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-02-19T08:33:49.996192Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.996225Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.996297Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008345s 2026-02-19T08:33:49.996715Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:49.997094Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:33:49.997200Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.998178Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.998200Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.998221Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.998570Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-02-19T08:33:49.998611Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.998633Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.998716Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.191931s 2026-02-19T08:33:49.999201Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:49.999662Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:33:49.999757Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.000606Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.000624Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.000642Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:50.001203Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-02-19T08:33:50.001241Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.001259Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.001321Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.271754s 2026-02-19T08:33:50.001680Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:50.002084Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:33:50.002175Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.003099Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.003125Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.003170Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:50.003474Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:50.003992Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:50.016831Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.017191Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2026-02-19T08:33:50.017227Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.017267Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.017332Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.203820s 2026-02-19T08:33:50.017619Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-02-19T08:33:50.019015Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.019051Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.019078Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:50.019506Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:50.020043Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:50.020206Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.021638Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:50.022512Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:33:50.023022Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:33:50.090106Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-02-19T08:33:50.090227Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:50.090361Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:50.090406Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-02-19T08:33:50.090455Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-02-19T08:33:50.090491Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-02-19T08:33:50.092107Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.092139Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.092179Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:50.092433Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:50.092853Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:50.093037Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.093241Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2026-02-19T08:33:50.093586Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2026-02-19T08:33:50.094716Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.094749Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.094771Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:50.095131Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:50.096237Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:50.096378Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.096770Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:50.197783Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.198034Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:33:50.198098Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:50.198153Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-02-19T08:33:50.198222Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-02-19T08:33:50.298695Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-02-19T08:33:50.298885Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2026-02-19T08:33:49.890643Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.890670Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.890749Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.891278Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:49.891821Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:33:49.891900Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.892938Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.892958Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.892979Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.893400Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-02-19T08:33:49.893752Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-02-19T08:33:49.893796Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.894657Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.894681Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.894703Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.895078Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-02-19T08:33:49.895135Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.895164Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.895452Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2026-02-19T08:33:49.896377Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.896396Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.896414Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.896801Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-02-19T08:33:49.896839Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.896865Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.896925Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2026-02-19T08:33:49.897945Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-02-19T08:33:49.897968Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-02-19T08:33:49.897993Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.898361Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:49.898951Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:49.909697Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-02-19T08:33:49.910649Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:49.911202Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2026-02-19T08:33:49.915193Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2026-02-19T08:33:49.915560Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:49.915621Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:33:49.915653Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:33:49.915672Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-02-19T08:33:49.915697Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-02-19T08:33:49.915729Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-02-19T08:33:49.915750Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2026-02-19T08:33:49.915770Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2026-02-19T08:33:49.915804Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2026-02-19T08:33:49.915826Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2026-02-19T08:33:49.915860Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2026-02-19T08:33:49.915880Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2026-02-19T08:33:49.915897Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2026-02-19T08:33:49.915951Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2026-02-19T08:33:49.915975Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2026-02-19T08:33:49.915992Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2026-02-19T08:33:49.916055Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2026-02-19T08:33:49.916074Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2026-02-19T08:33:49.916089Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2026-02-19T08:33:49.916104Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2026-02-19T08:33:49.916121Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2026-02-19T08:33:49.916139Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2026-02-19T08:33:49.916156Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2026-02-19T08:33:49.916174Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2026-02-19T08:33:49.916194Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2026-02-19T08:33:49.916214Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2026-02-19T08:33:49.916232Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2026-02-19T08:33:49.916250Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2026-02-19T08:33:49.916268Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2026-02-19T08:33:49.916294Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2026-02-19T08:33:49.916321Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2026-02-19T08:33:49.916338Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2026-02-19T08:33:49.916403Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2026-02-19T08:33:49.916423Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2026-02-19T08:33:49.916441Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2026-02-19T08:33:49.916458Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2026-02-19T08:33:49.916478Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2026-02-19T08:33:49.916498Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2026-02-19T08:33:49.916524Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2026-02-19T08:33:49.916597Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2026-02-19T08:33:49.916613Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2026-02-19T08:33:49.916636Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2026-02-19T08:33:49.916652Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2026-02-19T08:33:49.916668Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2026-02-19T08:33:49.916694Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2026-02-19T08:33:49.916720Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2026-02-19T08:33:49.916742Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2026-02-19T08:33:49.916760Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2026-02-19T08:33:49.916777Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2026-02-19T08:33:49.916792Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2026-02-19T08:33:49.916862Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-02-19T08:33:49.919407Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2026-02-19T08:33:49.919649Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2026-02-19T08:33:49.919712Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2026-02-19T08:33:49.919738Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2026-02-19T08:33:49.919757Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2026-02-19T08:33:49.919803Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2026-02-19T08:33:49.919823Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2026-02-19T08:33:49.919841Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2026-02-19T08:33:49.919859Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2026-02-19T08:33:49.919897Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2026-02-19T08:33:49.919919Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2026-02-19T08:33:49.919937Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2026-02-19T08:33:49.919965Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2026-02-19T08:33:49.919985Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2026-02-19T08:33:49.920003Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2026-02-19T08:33:49.920019Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2026-02-19T08:33:49.920036Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2026-02-19T08:33:49.920087Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2026-02-19T08:33:49.920111Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2026-02-19T08:33:49.920131Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2026-02-19T08:33:49.920147Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2026-02-19T08:33:49.920186Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2026-02-19T08:33:49.920205Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2026-02-19T08:33:49.920224Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2026-02-19T08:33:49.920246Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2026-02-19T08:33:49.920262Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2026-02-19T08:33:49.920279Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2026-02-19T08:33:49.920296Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2026-02-19T08:33:49.920323Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2026-02-19T08:33:49.920344Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2026-02-19T08:33:49.920362Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2026-02-19T08:33:49.920379Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2026-02-19T08:33:49.920396Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2026-02-19T08:33:49.920468Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2026-02-19T08:33:49.920492Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2026-02-19T08:33:49.920513Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2026-02-19T08:33:49.920531Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2026-02-19T08:33:49.920560Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2026-02-19T08:33:49.920582Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2026-02-19T08:33:49.920603Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2026-02-19T08:33:49.920621Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2026-02-19T08:33:49.920637Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2026-02-19T08:33:49.920654Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2026-02-19T08:33:49.920671Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2026-02-19T08:33:49.920709Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2026-02-19T08:33:49.920729Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2026-02-19T08:33:49.920747Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2026-02-19T08:33:49.920765Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2026-02-19T08:33:49.920783Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2026-02-19T08:33:49.920802Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2026-02-19T08:33:49.920820Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2026-02-19T08:33:49.920873Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-02-19T08:33:49.921032Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-02-19T08:33:49.922522Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.922550Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.922587Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.922993Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-02-19T08:33:49.923571Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:49.923760Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.925253Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:50.025780Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.026017Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:33:50.026098Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:50.026142Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-02-19T08:33:50.026212Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-02-19T08:33:50.026308Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-02-19T08:33:50.026665Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-02-19T08:33:50.026719Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-02-19T08:33:50.026867Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-02-19T08:33:50.028080Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.028106Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.028129Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:50.028426Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:50.028818Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:50.028970Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.029306Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:50.130316Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.130521Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:33:50.130573Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:50.130603Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-02-19T08:33:50.130663Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-02-19T08:33:50.331077Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-02-19T08:33:50.431547Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-02-19T08:33:50.431669Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-02-19T08:33:50.431986Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-02-19T08:33:50.433468Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.433499Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.433579Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:50.434267Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:50.434943Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:50.435143Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.435865Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:50.538566Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:50.538783Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:33:50.538842Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:50.538874Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-02-19T08:33:50.538933Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-02-19T08:33:50.539048Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-02-19T08:33:50.539175Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-02-19T08:33:50.539259Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-02-19T08:33:50.539393Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> KqpQueryService::TableSink_OlapOrder [GOOD] >> KqpQueryService::TableSink_OlapRWQueries >> Yq_1::Basic_EmptyDict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] Test command err: 2026-02-19T08:32:55.582689Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491511113401125:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.584355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:56.077143Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:56.080366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:56.080459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.181520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.190417Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.193881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491511113401070:2081] 1771489975570091 != 1771489975570094 2026-02-19T08:32:56.264004Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:32:56.446408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.446437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.446446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.446557Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.586230Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.135679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.203943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-02-19T08:32:57.380947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.553326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.637653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.435665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528293272129:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.435775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.436153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491528293272139:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.436238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.771621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.803551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.833477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.890724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.918879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.960230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:00.033111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:00.081123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:00.163086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491532588240317:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.163216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.163540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491532588240322:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.163605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491532588240323:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.163724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:00.167424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:00.180499Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491532588240326:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:00.264325Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491532588240377:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.582030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491511113401125:2084];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.582097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:02.811179Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:02.811273Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path= ... s::TEvents::TEvWakeup; 2026-02-19T08:33:34.519579Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:34.739246Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:34.841831Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.826963Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491689218480296:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.827062Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.827543Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491689218480306:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.827587Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.897829Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.945029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.985820Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.026330Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.072154Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.112108Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.152441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.203742Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.307440Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491693513448470:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.307505Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.307694Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491693513448475:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.307740Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491693513448476:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.307787Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.312089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:38.324172Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491693513448479:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:38.425954Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491693513448530:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:38.465249Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491672038609284:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:38.465327Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:40.471290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.474400Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.476512Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.583414Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.595010Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.614961Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.621407Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.621946Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:40.622370Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:46.858716Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771490026894, txId: 281474976710731] shutting down >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityBatchInvalid [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::CreatePartitionStream >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlMixedDml [GOOD] Test command err: 2026-02-19T08:32:55.606874Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491509294685658:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:55.607248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:32:55.960632Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:32:56.101821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:32:56.101909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:32:56.108018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:32:56.172015Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:32:56.173920Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491509294685586:2081] 1771489975562242 != 1771489975562245 2026-02-19T08:32:56.209343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:32:56.485202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:32:56.485244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:32:56.485251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:32:56.485330Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:32:56.625946Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:32:57.004452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:32:57.013141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:32:57.067412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.312883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.491441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:57.576149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.283584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526474556660:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.283702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.284048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526474556670:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.284093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.626607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.656823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.684067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.715822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.748412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.813786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.855961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.908945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:32:59.987374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526474557542:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.987444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.987761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526474557547:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.987800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491526474557548:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.987804Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:32:59.991358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:00.002719Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491526474557551:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:00.084233Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491530769524898:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:00.605808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491509294685658:2091];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:00.605887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:01.510098Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491535064492513:3982] txid# 281474976710674, issues: { message: "Group already exists" severity: 1 } 2026-02-19T08:33:01.519309Z no ... he_undelivered_message; 2026-02-19T08:33:42.663782Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:42.775586Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:42.791472Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:42.791542Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:42.816410Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:42.837797Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:42.950425Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:42.950455Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:42.950464Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:42.950563Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:43.459740Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:43.480215Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.560072Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.778870Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:43.837565Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.926425Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.171093Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491731434341737:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.171208Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.171534Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491731434341746:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.171581Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.246308Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.286337Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.322563Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.373988Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.412118Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.451046Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.494072Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.551822Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:47.641964Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491709959503431:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:47.642029Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:47.710563Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491731434342616:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.710667Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.710792Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491731434342621:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.711046Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491731434342623:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.711106Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:47.714529Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:47.725411Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491731434342624:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:47.819110Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491731434342679:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:49.785440Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [4:7608491740024277598:2543], status: GENERIC_ERROR, issues:
: Error: Optimization, code: 1070
:8:25: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2026-02-19T08:33:49.785948Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=4&id=YWFmODAzNGMtNTIwNmNiMmMtNjQwYTUwNTctNjMxODU4MTk=, ActorId: [4:7608491740024277591:2539], ActorState: ExecuteState, LegacyTraceId: 01khtghc5t7m62pvy4xykmr4e1, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Optimization" issue_code: 1070 severity: 1 issues { position { row: 8 column: 25 } message: "Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations." end_position { row: 8 column: 25 } issue_code: 2009 severity: 1 } } tx_id# trace_id# >> TestSqsTopicHttpProxy::TestChangeMessageVisibilityBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2026-02-19T08:33:49.819856Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.819908Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.819968Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.822775Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:49.825413Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:49.843912Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.853241Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2026-02-19T08:33:49.857203Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:33:49.857460Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-02-19T08:33:49.857840Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-02-19T08:33:49.858060Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-02-19T08:33:49.858171Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:49.858220Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:33:49.858252Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:33:49.858825Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2026-02-19T08:33:49.858877Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:33:49.858898Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-02-19T08:33:49.858922Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:33:49.859045Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2026-02-19T08:33:49.859151Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-02-19T08:33:49.859173Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-02-19T08:33:49.859190Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:33:49.859260Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2026-02-19T08:33:49.859275Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-02-19T08:33:49.859287Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-02-19T08:33:49.859307Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:33:49.859365Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2026-02-19T08:33:49.863007Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.863035Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.863057Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:49.863343Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:49.865983Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:49.866172Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:49.866599Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-02-19T08:33:49.867480Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:33:49.867729Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-02-19T08:33:49.868045Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-02-19T08:33:49.868282Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-02-19T08:33:49.868372Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:49.868409Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:33:49.868460Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:33:49.868596Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2026-02-19T08:33:49.868632Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:33:49.868653Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-02-19T08:33:49.868686Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:33:49.868831Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2026-02-19T08:33:49.868905Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-02-19T08:33:49.868924Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-02-19T08:33:49.868942Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-02-19T08:33:49.869022Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2026-02-19T08:33:49.869056Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-02-19T08:33:49.869076Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-02-19T08:33:49.869095Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00 ... 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 187 SeqNo: 228 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 188 SeqNo: 229 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 189 SeqNo: 230 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:33:52.015940Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2026-02-19T08:33:52.119605Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-02-19T08:33:52.119642Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-02-19T08:33:52.119836Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:52.125949Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:52.126473Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:52.126743Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-02-19T08:33:52.128202Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2026-02-19T08:33:52.214355Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2026-02-19T08:33:52.215348Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:52.217141Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:33:52.220015Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-02-19T08:33:52.220904Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-02-19T08:33:52.233189Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-02-19T08:33:52.234027Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-02-19T08:33:52.234831Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2026-02-19T08:33:52.235682Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2026-02-19T08:33:52.243729Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2026-02-19T08:33:52.244560Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2026-02-19T08:33:52.244629Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2026-02-19T08:33:52.244805Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-02-19T08:33:52.248326Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). 
Partition stream id: 1 2026-02-19T08:33:52.258805Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:52.258849Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:52.258880Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:52.259135Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:52.259468Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:52.259762Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:52.260304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:52.260450Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:52.260550Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:52.260604Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-02-19T08:33:52.260736Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> KqpQueryService::StreamExecuteCollectMeta [GOOD] >> KqpQueryService::ShowCreateViewOnTable >> KqpNamedExpressions::NamedExpressionRandomSelect+UseSink [GOOD] >> KqpNamedExpressions::NamedExpressionRandomSelect-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2026-02-19T08:32:41.052419Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491451571604204:2085];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:32:41.052479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0219 08:32:41.211412004 598807 dns_resolver_ares.cc:452] no server name supplied in dns URI E0219 08:32:41.211541737 598807 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-02-19T08:32:41.379323Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.379609Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.379657Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.438634Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.438720Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.439135Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.439229Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.439355Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.439978Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.443982Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.464828Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.487949Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:399: ydb/core/fq/libs/actors/nodes_manager.cpp:357: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27463 2026-02-19T08:32:41.488039Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.488107Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.488190Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.488231Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.495306Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.495494Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.516735Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.541149Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.541303Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.541349Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.541410Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.589377Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.589473Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.589537Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.589692Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.589775Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.600902Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:27463: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27463 } ] 2026-02-19T08:32:41.601242Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2746 ... 748Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7608491740959950827:2974], TxId: 281474976710744, task: 1. Ctx: { TraceId : 01khtghbg7cwbdhp0t755zr1jj. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:49.068396Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-02-19T08:33:49.068515Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491740959950827 RawX2: 4503629692144542 } } DstEndpoint { ActorId { RawX1: 7608491740959950828 RawX2: 4503629692144543 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7608491740959950828 RawX2: 4503629692144543 } } DstEndpoint { ActorId { RawX1: 7608491740959950823 RawX2: 4503629692144166 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-02-19T08:33:49.068543Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:49.068552Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:49.068568Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:49.068581Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:469: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. All outputs have been finished. Consider finished 2026-02-19T08:33:49.068590Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:49.068616Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:981: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-02-19T08:33:49.068626Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1086: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. Taken 0 locks 2026-02-19T08:33:49.068642Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1101: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. new data for read #0 seqno = 1 finished = 1 2026-02-19T08:33:49.068661Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608491740959950827:2974], TxId: 281474976710744, task: 1. Ctx: { TraceId : 01khtghbg7cwbdhp0t755zr1jj. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-02-19T08:33:49.068676Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608491740959950827:2974], TxId: 281474976710744, task: 1. Ctx: { TraceId : 01khtghbg7cwbdhp0t755zr1jj. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:49.068689Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1329: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-02-19T08:33:49.068704Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1259: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-02-19T08:33:49.068728Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1306: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. 
exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-02-19T08:33:49.068747Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1368: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. returned 1 rows; processed 1 rows 2026-02-19T08:33:49.068792Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1408: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. dropping batch for read #0 2026-02-19T08:33:49.068800Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:464: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. effective maxinflight 1024 sorted 0 2026-02-19T08:33:49.068812Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-02-19T08:33:49.068828Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1433: TxId: 281474976710744, task: 1, CA Id [7:7608491740959950827:2974]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-02-19T08:33:49.068924Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7608491740959950827:2974], TxId: 281474976710744, task: 1. Ctx: { TraceId : 01khtghbg7cwbdhp0t755zr1jj. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:49.068940Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:49.069011Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-02-19T08:33:49.069061Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:49.069078Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [7:7608491740959950828:2975], TxId: 281474976710744, task: 2. Ctx: { CheckpointId : . TraceId : 01khtghbg7cwbdhp0t755zr1jj. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:49.069202Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710744;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-02-19T08:33:49.069348Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7608491740959950827:2974], TxId: 281474976710744, task: 1. Ctx: { TraceId : 01khtghbg7cwbdhp0t755zr1jj. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-02-19T08:33:49.069365Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:550: SelfId: [7:7608491740959950827:2974], TxId: 281474976710744, task: 1. Ctx: { TraceId : 01khtghbg7cwbdhp0t755zr1jj. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MThlNmIwYTUtNTg2ZDkxNDYtYmMxMjhlZjMtM2MzNTEzYTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-02-19T08:33:49.069517Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710744;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> KqpService::RetryAfterShutdownThenDisconnect [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithTimeout [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] |95.6%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::ExecuteRetryQuery [GOOD] >> KqpQueryService::Explain >> ReadSessionImplTest::ForcefulDestroyPartitionStream >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressZstd >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] |95.6%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RetryAfterShutdownThenDisconnect [GOOD] Test command err: 2026-02-19T08:33:02.629667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491541728253394:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:02.639330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:03.001801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:03.030911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:03.031328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:03.083562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:03.121894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491541728253273:2081] 1771489982612188 != 1771489982612191 2026-02-19T08:33:03.141468Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:03.156170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:03.296667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:03.296700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:03.296709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-02-19T08:33:03.296840Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:03.652631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:03.973445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:04.017088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:04.084261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:04.295695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:04.545936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:04.672493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:06.967383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491558908124351:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:06.967491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:06.967772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491558908124361:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:06.967803Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.391824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.436606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.476497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.540825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.575677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.629300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491541728253394:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:07.629376Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:07.656648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.717177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.769148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:07.834628Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491563203092534:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.834694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.834988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491563203092539:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.835000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491563203092540:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.835034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:07.838767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:07.848456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491563203092543:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:07.934057Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491563203092594:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:09.473980Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3049} SessionId: ydb://session/3?node_id=1&id=YTNiOThhYjUtMmQ1Y2E2OWUtODkyZWE5NzktZjQ0ZDA3YWM=, ActorId: [1:7608491571793027482:2538] ... LookupError; 2026-02-19T08:33:32.957948Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:33.084196Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:33.084338Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:33.085156Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:33.085254Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:33.160886Z node 3 :HIVE WARN: hive_impl.cpp:818: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-02-19T08:33:33.161439Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:33.162059Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:33.256403Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:33.271334Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-02-19T08:33:34.107876Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:34.107942Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:34.107980Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:34.108388Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:34.177565Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:34.177676Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:34.825836Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:35.171146Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:36.013670Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.167151Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.342320Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:39.939756Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:3259:4034], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:39.940231Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:39.941432Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:3333:4053], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:39.941615Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:39.975876Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.763807Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:41.331126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:42.071132Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:42.581549Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.312067Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.994336Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:45.027050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.230902Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:4857:5097], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.231066Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.231888Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:4861:5101], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.232089Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.232196Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:4864:5104], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.242120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:47.073378Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:4866:5106], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:47.186938Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:4982:5184] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:48.870807Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.529145Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:977} ActorId: [3:6068:5812] TxId: 281474976715674. Ctx: { TraceId: 01khtghddv1n6bmz8y69m641kf, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MWFiMWNjYTItY2ViNTllYzctNTZhMmJiNmYtMjUwNjAzMDU=, PoolId: default, IsStreamingQuery: 0}. Stop executing Reason# NODE_SHUTTING_DOWN Message# trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] Test command err: 2026-02-19T08:33:29.736232Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491657107485676:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:29.745539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:30.095623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:30.095705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:30.134945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:30.154108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491657107485644:2081] 1771490009732718 != 1771490009732721 2026-02-19T08:33:30.175669Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:30.187483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:30.382444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:30.382468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:30.382474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:30.382559Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:30.418998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:30.753849Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:30.883729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:30.890557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:30.956147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.138826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.295797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:31.363410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.243736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491674287356710:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.243842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.244284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491674287356720:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.244343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.566504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.607963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.642308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.677853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.707724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.745272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.799141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.851891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:33.937528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491674287357590:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.937626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.938011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491674287357596:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.938012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491674287357595:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.938053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:33.943400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:33.968558Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491674287357599:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:34.039814Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491678582324946:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:34.735777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491657107485676:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:34.735840Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:36.800076Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491686450290692:2069];send_to=[0:7307199536658146131:7762515 ... l default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:45.977800Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.022727Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.094653Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.128782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.165180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.204629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.240322Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.291766Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.372532Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491730412445768:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.372654Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.372990Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491730412445773:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.373048Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491730412445774:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.373169Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.378855Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:46.390500Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491730412445777:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:46.475457Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491730412445828:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:48.237225Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=YmE4NDEzOTItNGIxOTdhYTktZDQ4N2JlNWQtNzFjOWY2Mg==, ActorId: [3:7608491739002380716:2536], ActorState: ReadyState, LegacyTraceId: 01khtghap101r8jm4wg8nsd7j3, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01khtghann5rk2e63g8y8byxj3" issue_code: 2015 severity: 1 } trace_id# 2026-02-19T08:33:49.332567Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491740263617439:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:49.332611Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:49.365778Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:49.468113Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:49.472236Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608491740263617408:2081] 1771490029331175 != 1771490029331178 2026-02-19T08:33:49.485051Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:49.485134Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:49.494621Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:49.549585Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:49.549605Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:49.549612Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:49.549693Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:49.634520Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:50.097378Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-02-19T08:33:50.338843Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:53.366465Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491757443487461:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:53.366608Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:53.366962Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491757443487496:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:53.371690Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:53.382273Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491757443487498:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-02-19T08:33:53.464880Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491757443487560:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:53.561156Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491757443487582:2547] txid# 281474976715660, issues: { message: "Type \'TzTimestamp\' specified for column \'payload\' is not supported by storage" severity: 1 } 2026-02-19T08:33:53.561897Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=4&id=MzU5YzcwNmYtYmNjNmJkMjMtMjE4OWU3MjQtZDBkODZhZmQ=, ActorId: [4:7608491757443487457:2326], ActorState: ExecuteState, LegacyTraceId: 01khtghch7avhv2h67s997y3qx, Create QueryResponse for error on request, msg: status# SCHEME_ERROR issues# { message: "Executing ESchemeOpCreateTable" issue_code: 2003 severity: 1 issues { message: "Type \'TzTimestamp\' specified for column \'payload\' is not supported by storage" issue_code: 2003 severity: 1 } }{ message: "Query invalidated on scheme/internal error during Scheme execution" issue_code: 2019 severity: 1 } trace_id# |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] Test command err: 2026-02-19T08:33:55.533018Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.533047Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.533065Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.533535Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:55.545929Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.564808Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.565801Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.574600Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.574621Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.574638Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.578105Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-02-19T08:33:55.589963Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.590143Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.593873Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.594226Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-02-19T08:33:55.595464Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.601774Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.601818Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.609950Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:55.610603Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.610736Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.613885Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.614322Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.614435Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:55.614507Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:55.614547Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-02-19T08:33:55.615251Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.615273Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.615290Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.625966Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:55.637946Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.638138Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.641900Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.642364Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.647411Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:55.648270Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:55.648341Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2026-02-19T08:33:55.654824Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.654847Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.654871Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.664969Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:55.665668Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.665833Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.669922Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.686084Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.690224Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:55.692784Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:55.692859Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-02-19T08:33:55.697933Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.697963Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.697984Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.701916Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:55.702626Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.702769Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.705885Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.706301Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.709945Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:55.710050Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:55.710101Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-02-19T08:33:55.710830Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.710852Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.710881Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.726336Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-02-19T08:33:55.726964Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.727099Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.729447Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.729895Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.731788Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:55.731884Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:55.731920Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-02-19T08:33:55.732855Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.732876Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.732900Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.745999Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:55.746571Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.746720Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.747120Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.747638Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.747826Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:55.747935Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:55.747975Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-02-19T08:33:55.748902Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.748925Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.748947Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.749217Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:55.749850Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.750009Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.753932Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.755466Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.755897Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-02-19T08:33:55.755981Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-02-19T08:33:55.756021Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-02-19T08:33:55.757730Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.757758Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.761811Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-02-19T08:33:55.762227Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-02-19T08:33:55.763019Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-02-19T08:33:55.763209Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-02-19T08:33:55.765874Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-02-19T08:33:55.766961Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:33:55.767712Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-02-19T08:33:55.768030Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2026-02-19T08:33:55.768178Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-02-19T08:33:55.768263Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-02-19T08:33:55.768289Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-02-19T08:33:55.768314Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2026-02-19T08:33:55.768331Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2026-02-19T08:33:55.768365Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2026-02-19T08:33:55.768385Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-02-19T08:33:55.768535Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2026-02-19T08:33:55.768657Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> KqpQueryService::PeriodicTaskInSessionPool [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] Test command err: 2026-02-19T08:33:25.945460Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491637915775412:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:25.945498Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:26.217905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:26.275991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:26.276095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:26.332216Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:26.337851Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491637915775195:2081] 1771490005896383 != 1771490005896386 2026-02-19T08:33:26.349332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:26.455284Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:26.506154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:26.506184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:26.506198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:26.506310Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:26.938209Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:26.950856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:27.008816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:27.151744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:27.301057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:27.364527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:29.506753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491655095646274:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:29.506868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:29.513861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491655095646284:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:29.514014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:29.891655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:29.932744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:29.972776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:30.018515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:30.063850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:30.103838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:30.138621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:30.223584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:30.334640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491659390614455:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:30.334722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:30.335066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491659390614460:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:30.335103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491659390614461:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:30.335212Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:30.339572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:30.362575Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491659390614464:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:30.467516Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491659390614517:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:30.945517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491637915775412:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:30.945579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:33.245702Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491673570589551:2071];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:33.252597Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/mig ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.736427Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.783889Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:43.804783Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.174161Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491729207019987:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.174258Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.174553Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491729207019997:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.174616Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.240089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.281181Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.319707Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.357519Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.398287Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.436475Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.473516Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.539783Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:46.625845Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491729207020864:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.625931Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.627903Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491729207020870:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.627961Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491729207020869:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.628005Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:46.632497Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:46.646134Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491729207020873:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:46.709074Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491729207020924:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:47.779401Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491712027148966:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:47.779469Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:48.272651Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:48.274962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:48.276724Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:48.401535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:48.406216Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:48.412410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:48.418845Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:48.419353Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:48.419729Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-02-19T08:33:52.276674Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:241: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1771490032312, txId: 281474976710714] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] Test command err: 2026-02-19T08:33:27.682118Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491648467954414:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:27.684634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:27.993377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:27.993509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:28.041228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:28.058301Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491648467954360:2081] 1771490007676132 != 1771490007676135 2026-02-19T08:33:28.069947Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:28.073243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:28.182223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:28.182242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:28.182255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:28.182314Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:28.275852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:28.690294Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:28.694430Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:31.148111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491665647824437:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.148255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.148621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491665647824447:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.148667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:31.440798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:33:31.603334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:31.603570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:31.603768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:31.603922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:31.604055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:31.604191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:31.604297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:31.604400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:31.604434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:31.604519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:31.604520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:31.604645Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:31.604673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:31.604779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:31.604796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:31.604908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:31.604968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:31.605075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491665647824596:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:31.605100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:31.605194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:31.605315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:31.605430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:31.605554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:31.605676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:31.605810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:31.605904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491665647824595:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:31.662808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7608491665647824598:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19 ... 75186224037892;self_id=[3:7608491755139663619:2344];ev=NActors::IEventHandle;tablet_id=72075186224037892;tx_id=281474976715658;this=136279210756896;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1771490033030;max=18446744073709551615;plan=0;src=[3:7608491737959793690:2156];cookie=52:1;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.030837Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[3:7608491755139663522:2341];ev=NActors::IEventHandle;tablet_id=72075186224037893;tx_id=281474976715658;this=136279198794848;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1771490033030;max=18446744073709551615;plan=0;src=[3:7608491737959793690:2156];cookie=62:1;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.038762Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.038833Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.038858Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.042189Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.042254Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.042277Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.055220Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.055290Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.055312Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.055407Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.055456Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.055486Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.063800Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.063851Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.063866Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.067462Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.067522Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.067543Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.072820Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.072894Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 
2026-02-19T08:33:53.072922Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.078493Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.078554Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.078575Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.082576Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.082629Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.082650Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.089830Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.089899Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.089918Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:53.106158Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:53.228369Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491759434631237:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:53.228490Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:53.228889Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491759434631242:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:53.228967Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491759434631243:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:53.229149Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:53.233020Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:53.246167Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491759434631246:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-02-19T08:33:53.332817Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491759434631297:2845] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:53.829783Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491737959793348:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:53.829888Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::TableSink_OltpInteractive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] Test command err: 2026-02-19T08:33:33.647434Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491673552135772:2190];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:33.647492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:33.704118Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:33.975356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:33.975467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:34.058608Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:34.081075Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491673552135619:2081] 1771490013639316 != 1771490013639319 2026-02-19T08:33:34.091358Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:34.096355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:34.325159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:34.351938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:34.351961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:34.351967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:34.352046Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-02-19T08:33:34.681878Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:34.873916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:34.886166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:36.998284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491686437038378:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.998284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491686437038401:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.998361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.998608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491686437038413:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:36.998684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.001753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:37.012291Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491686437038412:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-02-19T08:33:37.152431Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491690732005761:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:37.673555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.903640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:37.910988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-02-19T08:33:38.831120Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491693484282196:2162];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:38.831291Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:38.882280Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:38.966874Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:38.969205Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:38.969272Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:38.969853Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491693484282059:2081] 1771490018815727 != 1771490018815730 2026-02-19T08:33:39.045661Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:39.053828Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:39.114264Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:39.114289Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-02-19T08:33:39.114296Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:39.114367Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:39.513399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:39.522712Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:39.841480Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:42.070216Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491710664152128:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.070282Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491710664152142:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.070335Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.073895Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491710664152146:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.073974Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.074425Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:42.085982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyT ... shard__operation_create_table.cpp:689) 2026-02-19T08:33:48.372406Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:48.532375Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:48.612112Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:48.743000Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:51.222723Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491751842176883:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.222809Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.223111Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491751842176892:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.223187Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.357423Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.399722Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.438751Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.480758Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.525196Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.575283Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.624422Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.704095Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:51.811321Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491751842177764:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.811417Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.811851Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491751842177769:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.811903Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491751842177770:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.812019Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:51.816190Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:51.833533Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491751842177773:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:51.893630Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491751842177824:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:52.653299Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491734662306002:2201];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:52.653363Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:53.605486Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:53.718082Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:53.776492Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:54.332972Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:54.531284Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:54.645975Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:54.894275Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.120328Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.300109Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpQueryService::TableSink_OlapRWQueries [GOOD] >> KqpService::ToDictCache+UseCache [GOOD] >> KqpService::ToDictCache-UseCache >> KqpQueryServiceScripts::ExecuteMultiScript [GOOD] >> KqpQueryServiceScripts::ExecuteScriptPg >> KqpQueryService::TableSink_OlapDelete [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink [GOOD] >> KqpQueryService::TableSink_DisableSink >> TPartitionTests::CorrectRange_Commit >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] >> KqpQueryService::ExecuteQueryMultiResult >> TFixTransactionStatesTests::Transaction_Adjacent_ReadRanges [GOOD] >> TFixTransactionStatesTests::Transaction_Multiple_ReadRanges [GOOD] >> TInFlightControllerTest::TestAddBasic [GOOD] >> TInFlightControllerTest::TestAddExactLimit [GOOD] >> TInFlightControllerTest::TestAddManyOffsets [GOOD] >> TInFlightControllerTest::TestAddManyOffsets2 [GOOD] >> TInFlightControllerTest::TestAddManyOffsets3 [GOOD] >> TInFlightControllerTest::TestAddBigMessageAndSmallMessagesAfterIt [GOOD] >> TInFlightControllerTest::SlidingWindowTest >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] >> KqpQueryServiceScripts::InvalidFetchToken >> KqpQueryService::ExecuteQueryScalar [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpInteractive [GOOD] Test command err: 2026-02-19T08:33:41.854884Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491706579442060:2147];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:41.855095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:41.900957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:42.145139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:42.145263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:42.191857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:42.196105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:42.245697Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491706579441951:2081] 1771490021848745 != 1771490021848748 2026-02-19T08:33:42.250124Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:42.320625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:42.320664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:42.320676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:42.320752Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:42.441269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:42.751593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:42.868523Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:44.779651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491719464344730:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.779741Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.780029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491719464344740:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.780102Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.991158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:45.127461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491723759312132:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:45.127532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:45.127869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491723759312137:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:45.127907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491723759312138:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:45.128018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:45.131913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:45.141108Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491723759312141:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:33:45.258312Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491723759312192:2594] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:46.462621Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491727980781112:2072];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:46.462667Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:46.483974Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:46.587124Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:46.593795Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7608491727980781070:2081] 1771490026461887 != 1771490026461890 2026-02-19T08:33:46.598690Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:46.598785Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:46.626497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:46.698594Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:46.698610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:46.698615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:46.698677Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:46.750163Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:47.127965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:47.472168Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:49.742721Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491740865683853:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:49.742799Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:49.743080Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491740865683862:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:49.743123Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:49.800383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:49.862079Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491740865683956:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... TRAINT_VIOLATION;details=Conflict with existing key.;tx_id=4; 2026-02-19T08:33:50.326862Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 4 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-02-19T08:33:50.327061Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 4 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-02-19T08:33:50.327272Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:996: SelfId: [2:7608491745160651397:2367], Table: `/Root/DataShard` ([72057594046644480:38:1]), SessionActorId: [2:7608491745160651382:2367]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[2:7608491745160651397:2367].{
: Error: Conflict with existing key., code: 2012 } 2026-02-19T08:33:50.327848Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [2:7608491745160651391:2367], SessionActorId: [2:7608491745160651382:2367], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7608491745160651382:2367]. 2026-02-19T08:33:50.328164Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=2&id=NzM4NzFmN2EtNzdlYjBlMzMtMjQ0MWJkNC05NjkzNGEzMA==, ActorId: [2:7608491745160651382:2367], ActorState: ExecuteState, LegacyTraceId: 01khtghcntfttr7de225vgxfkc, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7608491745160651392:2367] from: [2:7608491745160651391:2367] trace_id# 2026-02-19T08:33:50.328305Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [2:7608491745160651392:2367] TxId: 281474976715663. Ctx: { TraceId: 01khtghcntfttr7de225vgxfkc, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzM4NzFmN2EtNzdlYjBlMzMtMjQ0MWJkNC05NjkzNGEzMA==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-02-19T08:33:50.328595Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=2&id=NzM4NzFmN2EtNzdlYjBlMzMtMjQ0MWJkNC05NjkzNGEzMA==, ActorId: [2:7608491745160651382:2367], ActorState: ExecuteState, LegacyTraceId: 01khtghcntfttr7de225vgxfkc, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/DataShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-02-19T08:33:51.355686Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7608491751794031674:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:51.355770Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:51.390645Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:51.512558Z node 3 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:51.533990Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:51.534069Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:51.554265Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:51.638060Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:51.638080Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:51.638088Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:51.638176Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:51.673908Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:52.227881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:52.236334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:52.373852Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:55.257617Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7608491768973901695:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.257726Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.258028Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768973901705:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.258091Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.294642Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.514639Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.802097Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768973903033:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.802194Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.802607Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768973903035:2444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.802647Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.814115Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768973903039:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.814256Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.814650Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768973903044:2449], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.814716Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768973903045:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.814867Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.818538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:55.835005Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491768973903048:2451], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-02-19T08:33:55.897076Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491768973903099:3399] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:56.357870Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491751794031674:2081];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:56.357936Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPartitionTests::CorrectRange_Multiple_Consumers >> TPQTest::TestPartitionTotalQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] Test command err: 2026-02-19T08:33:21.348972Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491620703996260:2254];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:21.349015Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:21.617296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:21.646380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:21.646473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:21.694742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:21.719739Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:21.721098Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491620703996044:2081] 1771490001332095 != 1771490001332098 2026-02-19T08:33:21.843782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:21.843800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:21.843806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:21.843855Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:21.891855Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:22.349941Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:22.401668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:22.415142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:24.663551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491633588898831:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:24.663763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:24.664778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491633588898841:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:24.664836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:24.935837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:33:25.100037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:25.100250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:25.100470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:25.100590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:25.100706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:25.100821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:25.100930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:25.101035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:25.101156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:25.101274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:25.101393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-02-19T08:33:25.101504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:25.101628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491637883866269:2334];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:25.110516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:25.110615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:25.110821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:25.110947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:25.111066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:25.111424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:25.111532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:25.111653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:25.111811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:25.111935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:25.112072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:25.112204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:25.112312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7608491637883866271:2336];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:25.156815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id= ... ChunksMeta; 2026-02-19T08:33:55.832759Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[3:7608491769048196462:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:55.832865Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[3:7608491769048196462:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:55.837071Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-02-19T08:33:55.837134Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-02-19T08:33:55.837237Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-02-19T08:33:55.837275Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-02-19T08:33:55.837460Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-02-19T08:33:55.837501Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-02-19T08:33:55.837597Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-02-19T08:33:55.837637Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-02-19T08:33:55.837682Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-02-19T08:33:55.837733Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-02-19T08:33:55.837771Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-02-19T08:33:55.837806Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-02-19T08:33:55.838014Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-02-19T08:33:55.838057Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-02-19T08:33:55.838178Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-02-19T08:33:55.838214Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-02-19T08:33:55.838254Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-02-19T08:33:55.838287Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-02-19T08:33:55.838329Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-02-19T08:33:55.838359Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-02-19T08:33:55.838490Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-02-19T08:33:55.838531Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-02-19T08:33:55.838616Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-02-19T08:33:55.838655Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-02-19T08:33:55.843300Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[3:7608491769048196462:2337];ev=NActors::IEventHandle;tablet_id=72075186224037890;tx_id=281474976710658;this=136322241140960;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1771490035842;max=18446744073709551615;plan=0;src=[3:7608491751868326638:2144];cookie=32:1;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.847320Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.847389Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.847410Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.847550Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.847600Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.847619Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.857375Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.858401Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.858434Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.892121Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491769048196562:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.892241Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.892526Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491769048196567:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.892590Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491769048196568:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.892729Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.897136Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:55.910645Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491769048196571:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:33:56.008244Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491773343163918:2635] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:56.447837Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491751868326341:2080];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:56.451030Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapDelete [GOOD] Test command err: 2026-02-19T08:33:37.982323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491689469337744:2090];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:37.982763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:38.255970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:38.290738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:38.290858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:38.350475Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:38.350884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:38.419965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:38.447820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:38.447838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:38.447847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:38.447927Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:38.957265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:38.966864Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:38.993846Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:41.287749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491706649207745:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:41.287888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:41.288384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491706649207755:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:41.288463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:41.604590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:33:41.768976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:41.769063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:41.769212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:41.769213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:41.769399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:41.769411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:41.769501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:41.769560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:41.769575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:41.769626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:41.769691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:41.769755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:41.769798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:41.769810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:41.769864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:41.769877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:41.770006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:41.770056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:41.770096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:41.770154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:41.770164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:41.770237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491706649207897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:41.770263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:41.770377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:41.770503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:41.770618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491706649207896:2335];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:41.824714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7608491706649207900:2338];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:41.8247 ... N: log.cpp:841: tablet_id=72075186224037890;self_id=[3:7608491764236464625:2341];ev=NActors::IEventHandle;tablet_id=72075186224037890;tx_id=281474976715658;this=137144402057344;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1771490035215;max=18446744073709551615;plan=0;src=[3:7608491751351562037:2142];cookie=32:1;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.216144Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[3:7608491764236464627:2342];ev=NActors::IEventHandle;tablet_id=72075186224037893;tx_id=281474976715658;this=137144400809664;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1771490035215;max=18446744073709551615;plan=0;src=[3:7608491751351562037:2142];cookie=62:1;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.217735Z node 3 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037894;self_id=[3:7608491764236464623:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:460;TablesManager not ready=72075186224037894; 2026-02-19T08:33:55.221334Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[3:7608491764236464704:2343];ev=NActors::IEventHandle;tablet_id=72075186224037892;tx_id=281474976715658;this=137144400807424;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1771490035220;max=18446744073709551615;plan=0;src=[3:7608491751351562037:2142];cookie=52:1;;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.226301Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.226366Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.226388Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.227015Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.227085Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.227107Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.237191Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.237242Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.237257Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.237279Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.237287Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.237304Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.247370Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.247439Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.247461Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.248130Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.248175Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.248194Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.257431Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.257470Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.257495Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.257516Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.257519Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.257536Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.267954Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.267990Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.268021Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.268038Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.268042Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.268057Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:33:55.314130Z node 3 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768531432230:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.314252Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.318049Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768531432236:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.318077Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491768531432235:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.318148Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.325442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:55.341473Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491768531432239:2396], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-02-19T08:33:55.412610Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491768531432290:2778] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TSourceIdTests::ExpensiveCleanup >> PQCountersSimple::Partition >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=empty >> TPartitionTests::ConflictingTxIsAborted >> IncrementalRestoreScan::Empty >> IncrementalRestoreScan::ChangeSenderEmpty >> TSentinelTests::PDiskFaultyState >> KqpQueryService::ShowCreateViewOnTable [GOOD] >> IncrementalRestoreScan::ChangeSenderSimple >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback >> PQCountersSimple::Partition [GOOD] >> PQCountersSimple::PartitionLevelMetrics >> TInFlightControllerTest::SlidingWindowTest [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=foo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryScalar [GOOD] Test command err: 2026-02-19T08:33:12.791440Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491581813415471:2241];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:12.791507Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:13.039177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:13.041654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:13.041761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:13.133882Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491581813415258:2081] 1771489992713062 != 1771489992713065 2026-02-19T08:33:13.135750Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:13.141536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:13.224065Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:13.269857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:13.269892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:13.269909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:13.269972Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:13.683951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:13.693991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:13.757956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:13.802422Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:13.875915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.024376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:14.092975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.103594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491598993286330:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.103756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.104117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491598993286340:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.104195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.441810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.487818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.520145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.546214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.576273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.620165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.653277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.724196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:16.829382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491598993287209:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.829477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.829766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491598993287214:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.829766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491598993287215:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.829812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:16.833565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:16.846193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491598993287218:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:16.927233Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491598993287269:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:17.791013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491581813415471:2241];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:17.791102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:18.341927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710 ... hZjAwNzktZjg1MTNkMGEtMzJjMDE4NWYtZTdhMTQ5OGE=, ActorId: [3:7608491742937285757:2835], ActorState: ExecuteState, LegacyTraceId: 01khtghc6x2ffhwtmac6jz8rac, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool status# PRECONDITION_FAILED issues# { message: "Resource pool MyPool was disabled due to zero concurrent query limit" severity: 1 } trace_id# 2026-02-19T08:33:50.956878Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491744799247532:2228];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:50.957003Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:50.998013Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:51.074658Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:51.079555Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608491744799247342:2081] 1771490030939784 != 1771490030939787 2026-02-19T08:33:51.112716Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:51.112814Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:51.119097Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:51.282268Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:51.282285Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:51.282291Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:51.282355Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:51.295828Z node 4 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:51.870586Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:51.893837Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:52.020432Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:52.059306Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:52.238102Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:52.320570Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.176196Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491766274085694:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.176288Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.176591Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491766274085703:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.176632Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.277863Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.322690Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.361619Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.405278Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.445059Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.506483Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.577419Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.642766Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.763592Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491766274086579:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.763705Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.764046Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491766274086584:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.764107Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491766274086585:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.764171Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:55.768704Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:55.786483Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491766274086588:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:33:55.866247Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491766274086639:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:55.955001Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491744799247532:2228];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:55.955099Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=noset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2026-02-19T08:33:58.576764Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:33:58.669269Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:33:58.669360Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:33:58.669406Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:33:58.669464Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:33:58.691101Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2026-02-19T08:33:58.692939Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-02-19T08:33:58.693460Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:184:2196] 2026-02-19T08:33:58.694367Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-02-19T08:33:58.694411Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 
2026-02-19T08:33:58.694441Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:184:2196] 2026-02-19T08:33:58.694490Z node 1 :PERSQUEUE DEBUG: partition.cpp:733: [72057594037927937][Partition][1][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-02-19T08:33:58.694545Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:33:58.694983Z node 1 :PERSQUEUE DEBUG: partition.cpp:4506: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-02-19T08:33:58.695016Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:33:58.695064Z node 1 :PERSQUEUE DEBUG: partition.cpp:2445: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ChangeConfig]) 2026-02-19T08:33:58.695127Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:33:58.695161Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:58.695202Z node 1 :PERSQUEUE DEBUG: partition.cpp:2509: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ChangeConfig]) 2026-02-19T08:33:58.695256Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:33:58.695279Z node 1 :PERSQUEUE DEBUG: partition.cpp:2371: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-02-19T08:33:58.695309Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][1][StateIdle] Try persist 2026-02-19T08:33:58.695500Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-02-19T08:33:58.695552Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:114: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-02-19T08:33:58.695679Z node 1 :PERSQUEUE INFO: partition.cpp:4365: [72057594037927937][Partition][1][StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2026-02-19T08:33:58.695741Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|721bdecb-abfb4b63-2a3ff596-de74957b_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2026-02-19T08:33:58.695787Z node 1 :PERSQUEUE DEBUG: partition.cpp:2334: [72057594037927937][Partition][1][StateIdle] Writing. 
Can't process user action and tx events 2026-02-19T08:33:58.695984Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction Send disk status response with cookie: 0 2026-02-19T08:33:58.696138Z node 1 :PERSQUEUE DEBUG: partition.cpp:2182: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:33:58.696252Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:151: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-02-19T08:33:58.696316Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:33:58.696350Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:33:58.696387Z node 1 :PERSQUEUE DEBUG: partition.cpp:2452: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-02-19T08:33:58.696419Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:33:58.696442Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:58.696475Z node 1 :PERSQUEUE DEBUG: partition.cpp:2516: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-02-19T08:33:58.696514Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:33:58.696543Z node 1 :PERSQUEUE DEBUG: partition.cpp:2371: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-02-19T08:33:58.696569Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][1][StateIdle] Try persist 2026-02-19T08:33:58.696607Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2026-02-19T08:33:58.696674Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-02-19T08:33:58.696878Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2026-02-19T08:33:58.696945Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1792: [72057594037927937][Partition][1][StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2026-02-19T08:33:58.697003Z node 1 :PERSQUEUE DEBUG: partition.cpp:4293: [72057594037927937][Partition][1][StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2026-02-19T08:33:58.697031Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:33:58.697055Z node 1 :PERSQUEUE DEBUG: partition.cpp:2452: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-02-19T08:33:58.697111Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-02-19T08:33:58.697149Z node 1 :PERSQUEUE DEBUG: partition.cpp:2366: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-02-19T08:33:58.697187Z node 1 :PERSQUEUE DEBUG: partition.cpp:2516: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-02-19T08:33:58.697257Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2026-02-19T08:33:58.698229Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2026-02-19T08:33:58.698308Z node 1 :PERSQUEUE DEBUG: partition.cpp:2417: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-02-19T08:33:58.698335Z node 1 :PERSQUEUE DEBUG: partition.cpp:2371: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-02-19T08:33:58.698370Z node 1 :PERSQUEUE DEBUG: partition.cpp:2376: [72057594037927937][Partition][1][StateIdle] Try persist 2026-02-19T08:33:58.698823Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][1][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000? size 104 WTime 128 2026-02-19T08:33:58.699133Z node 1 :PERSQUEUE DEBUG: partition.cpp:2182: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-02-19T08:33:58.709536Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037932033 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:33:58.740700Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:33:58.751274Z node 1 :PERSQUEUE DEBUG: partition.cpp:2334: [72057594037927937][Partition][1][StateIdle] Writing. Can't process user action and tx events 2026-02-19T08:33:58.751355Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][1][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2026-02-19T08:33:58.751452Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2026-02-19T08:33:58.751510Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyWrite. Partition: 1 2026-02-19T08:33:58.751582Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][1][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2026-02-19T08:33:58.751814Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2026-02-19T08:33:58.751862Z node 1 :PERSQUEUE DEBUG: partition.cpp:2358: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-02-19T08:33:58.751900Z node 1 :PERSQUEUE DEBUG: partition.cpp:24 ... 2 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 311 Iteration 312 Iteration 313 Iteration 
314 Iteration 315 Iteration 316 Iteration 317 Iteration 318 Iteration 319 Iteration 320 Iteration 321 Iteration 322 Iteration 323 Iteration 324 Iteration 325 Iteration 326 Iteration 327 Iteration 328 Iteration 329 Iteration 330 Iteration 331 Iteration 332 Iteration 333 Iteration 334 Iteration 335 Iteration 336 Iteration 337 Iteration 338 Iteration 339 Iteration 340 Iteration 341 Iteration 342 Iteration 343 Iteration 344 Iteration 345 Iteration 346 Iteration 347 Iteration 348 Iteration 349 Iteration 350 Iteration 351 Iteration 352 Iteration 353 Iteration 354 Iteration 355 Iteration 356 Iteration 357 Iteration 358 Iteration 359 Iteration 360 Iteration 361 Iteration 362 Iteration 363 Iteration 364 Iteration 365 Iteration 366 Iteration 367 Iteration 368 Iteration 369 Iteration 370 Iteration 371 Iteration 372 Iteration 373 Iteration 374 Iteration 375 Iteration 376 Iteration 377 Iteration 378 Iteration 379 Iteration 380 Iteration 381 Iteration 382 Iteration 383 Iteration 384 Iteration 385 Iteration 386 Iteration 387 Iteration 388 Iteration 389 Iteration 390 Iteration 391 Iteration 392 Iteration 393 Iteration 394 Iteration 395 Iteration 396 Iteration 397 Iteration 398 Iteration 399 Iteration 400 Iteration 401 Iteration 402 Iteration 403 Iteration 404 Iteration 405 Iteration 406 Iteration 407 Iteration 408 Iteration 409 Iteration 410 Iteration 411 Iteration 412 Iteration 413 Iteration 414 Iteration 415 Iteration 416 Iteration 417 Iteration 418 Iteration 419 Iteration 420 Iteration 421 Iteration 422 Iteration 423 Iteration 424 Iteration 425 Iteration 426 Iteration 427 Iteration 428 Iteration 429 Iteration 430 Iteration 431 Iteration 432 Iteration 433 Iteration 434 Iteration 435 Iteration 436 Iteration 437 Iteration 438 Iteration 439 Iteration 440 Iteration 441 Iteration 442 Iteration 443 Iteration 444 Iteration 445 Iteration 446 Iteration 447 Iteration 448 Iteration 449 Iteration 450 Iteration 451 Iteration 452 Iteration 453 Iteration 454 Iteration 455 Iteration 456 Iteration 457 Iteration 458 Iteration 459 Iteration 460 Iteration 461 Iteration 462 Iteration 463 Iteration 464 Iteration 465 Iteration 466 Iteration 467 Iteration 468 Iteration 469 Iteration 470 Iteration 471 Iteration 472 Iteration 473 Iteration 474 Iteration 475 Iteration 476 Iteration 477 Iteration 478 Iteration 479 Iteration 480 Iteration 481 Iteration 482 Iteration 483 Iteration 484 Iteration 485 Iteration 486 Iteration 487 Iteration 488 Iteration 489 Iteration 490 Iteration 491 Iteration 492 Iteration 493 Iteration 494 Iteration 495 Iteration 496 Iteration 497 Iteration 498 Iteration 499 Iteration 500 Iteration 501 Iteration 502 Iteration 503 Iteration 504 Iteration 505 Iteration 506 Iteration 507 Iteration 508 Iteration 509 Iteration 510 Iteration 511 Iteration 512 Iteration 513 Iteration 514 Iteration 515 Iteration 516 Iteration 517 Iteration 518 Iteration 519 Iteration 520 Iteration 521 Iteration 522 Iteration 523 Iteration 524 Iteration 525 Iteration 526 Iteration 527 Iteration 528 Iteration 529 Iteration 530 Iteration 531 Iteration 532 Iteration 533 Iteration 534 Iteration 535 Iteration 536 Iteration 537 Iteration 538 Iteration 539 Iteration 540 Iteration 541 Iteration 542 Iteration 543 Iteration 544 Iteration 545 Iteration 546 Iteration 547 Iteration 548 Iteration 549 Iteration 550 Iteration 551 Iteration 552 Iteration 553 Iteration 554 Iteration 555 Iteration 556 Iteration 557 Iteration 558 Iteration 559 Iteration 560 Iteration 561 Iteration 562 Iteration 563 Iteration 564 Iteration 565 Iteration 566 Iteration 567 
Iteration 568 Iteration 569 Iteration 570 Iteration 571 Iteration 572 Iteration 573 Iteration 574 Iteration 575 Iteration 576 Iteration 577 Iteration 578 Iteration 579 Iteration 580 Iteration 581 Iteration 582 Iteration 583 Iteration 584 Iteration 585 Iteration 586 Iteration 587 Iteration 588 Iteration 589 Iteration 590 Iteration 591 Iteration 592 Iteration 593 Iteration 594 Iteration 595 Iteration 596 Iteration 597 Iteration 598 Iteration 599 Iteration 600 Iteration 601 Iteration 602 Iteration 603 Iteration 604 Iteration 605 Iteration 606 Iteration 607 Iteration 608 Iteration 609 Iteration 610 Iteration 611 Iteration 612 Iteration 613 Iteration 614 Iteration 615 Iteration 616 Iteration 617 Iteration 618 Iteration 619 Iteration 620 Iteration 621 Iteration 622 Iteration 623 Iteration 624 Iteration 625 Iteration 626 Iteration 627 Iteration 628 Iteration 629 Iteration 630 Iteration 631 Iteration 632 Iteration 633 Iteration 634 Iteration 635 Iteration 636 Iteration 637 Iteration 638 Iteration 639 Iteration 640 Iteration 641 Iteration 642 Iteration 643 Iteration 644 Iteration 645 Iteration 646 Iteration 647 Iteration 648 Iteration 649 Iteration 650 Iteration 651 Iteration 652 Iteration 653 Iteration 654 Iteration 655 Iteration 656 Iteration 657 Iteration 658 Iteration 659 Iteration 660 Iteration 661 Iteration 662 Iteration 663 Iteration 664 Iteration 665 Iteration 666 Iteration 667 Iteration 668 Iteration 669 Iteration 670 Iteration 671 Iteration 672 Iteration 673 Iteration 674 Iteration 675 Iteration 676 Iteration 677 Iteration 678 Iteration 679 Iteration 680 Iteration 681 Iteration 682 Iteration 683 Iteration 684 Iteration 685 Iteration 686 Iteration 687 Iteration 688 Iteration 689 Iteration 690 Iteration 691 Iteration 692 Iteration 693 Iteration 694 Iteration 695 Iteration 696 Iteration 697 Iteration 698 Iteration 699 Iteration 700 Iteration 701 Iteration 702 Iteration 703 Iteration 704 Iteration 705 Iteration 706 Iteration 707 Iteration 708 Iteration 709 Iteration 710 Iteration 711 Iteration 712 Iteration 713 Iteration 714 Iteration 715 Iteration 716 Iteration 717 Iteration 718 Iteration 719 Iteration 720 Iteration 721 Iteration 722 Iteration 723 Iteration 724 Iteration 725 Iteration 726 Iteration 727 Iteration 728 Iteration 729 Iteration 730 Iteration 731 Iteration 732 Iteration 733 Iteration 734 Iteration 735 Iteration 736 Iteration 737 Iteration 738 Iteration 739 Iteration 740 Iteration 741 Iteration 742 Iteration 743 Iteration 744 Iteration 745 Iteration 746 Iteration 747 Iteration 748 Iteration 749 Iteration 750 Iteration 751 Iteration 752 Iteration 753 Iteration 754 Iteration 755 Iteration 756 Iteration 757 Iteration 758 Iteration 759 Iteration 760 Iteration 761 Iteration 762 Iteration 763 Iteration 764 Iteration 765 Iteration 766 Iteration 767 Iteration 768 Iteration 769 Iteration 770 Iteration 771 Iteration 772 Iteration 773 Iteration 774 Iteration 775 Iteration 776 Iteration 777 Iteration 778 Iteration 779 Iteration 780 Iteration 781 Iteration 782 Iteration 783 Iteration 784 Iteration 785 Iteration 786 Iteration 787 Iteration 788 Iteration 789 Iteration 790 Iteration 791 Iteration 792 Iteration 793 Iteration 794 Iteration 795 Iteration 796 Iteration 797 Iteration 798 Iteration 799 Iteration 800 Iteration 801 Iteration 802 Iteration 803 Iteration 804 Iteration 805 Iteration 806 Iteration 807 Iteration 808 Iteration 809 Iteration 810 Iteration 811 Iteration 812 Iteration 813 Iteration 814 Iteration 815 Iteration 816 Iteration 817 Iteration 818 Iteration 819 Iteration 820 Iteration 
821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TInFlightControllerTest::SlidingWindowTest [GOOD] >> TPQTest::TestReadRuleVersions >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=empty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateViewOnTable [GOOD] Test command err: 2026-02-19T08:33:39.954185Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491698892131303:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:39.956439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-02-19T08:33:40.214052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:40.242615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:40.242733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:40.313118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:40.362822Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:40.365815Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491698892131175:2081] 1771490019942427 != 1771490019942430 2026-02-19T08:33:40.411318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:40.430803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:40.430824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:40.430831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:40.430903Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:40.855862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:40.875657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:40.952583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.965442Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:41.136451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:41.322509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:41.434258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.296720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491716072002237:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:43.296831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:43.297317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491716072002247:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:43.297373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:43.665128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.692248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.726589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.762224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.791049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.826934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.896071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:43.949105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:44.033578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491720366970414:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.033648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.033933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491720366970420:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.033990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.034294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491720366970419:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:44.039708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:44.054761Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491720366970423:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:44.154911Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491720366970474:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:44.949806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491698892131303:2155];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:44.949872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:46.866287Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491728164839035:2152];send_to=[0:7307199536658146131:7762515 ... 2026-02-19T08:33:53.722069Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7608491758694778566:2081] 1771490033512936 != 1771490033512939 2026-02-19T08:33:53.747384Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:53.834265Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:53.868697Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:53.868723Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:53.868735Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:53.868826Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:54.435267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:54.441918Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:54.456028Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:54.538450Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:54.545992Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:54.805346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:54.885621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.018444Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491775874649640:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.018526Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.018788Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491775874649650:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.018853Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.088911Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.138441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.166435Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.197630Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.234430Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.267075Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.341821Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.386612Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:57.447004Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491775874650524:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.447084Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.447140Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491775874650529:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.447243Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491775874650531:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.447293Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:57.450501Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:57.461224Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491775874650533:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:57.515167Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491775874650584:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:58.528539Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491758694778620:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:58.528598Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:59.307637Z node 3 :SYSTEM_VIEWS ERROR: scan_actor_base_impl.h:102: Scan error, actor: [3:7608491784464585494:2547], owner: [3:7608491784464585492:2545], scan id: 0, sys view info: Type: EShowCreate, issues: {
: Error: Path type mismatch, expected: View, found: Table } 2026-02-19T08:33:59.308729Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1267: SelfId: [3:7608491784464585493:2546], TxId: 281474976710673, task: 2. Ctx: { TraceId : 01khtghncq7jyj42ex7zm2hfss. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YjliZDRjYTktZTA1OTk2YTAtMzI3MTNhOTctNjNhMTE1MzY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7608491784464585489:2536], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2026-02-19T08:33:59.309271Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=YjliZDRjYTktZTA1OTk2YTAtMzI3MTNhOTctNjNhMTE1MzY=, ActorId: [3:7608491784464585473:2536], ActorState: ExecuteState, LegacyTraceId: 01khtghncq7jyj42ex7zm2hfss, Create QueryResponse for error on request, msg: status# BAD_REQUEST issues# { message: "Path type mismatch, expected: View, found: Table" severity: 1 } trace_id# >> TInFlightControllerTest::TestRemoveBasic [GOOD] >> TInFlightControllerTest::TestRemoveAtExactOffset [GOOD] >> TInFlightControllerTest::TestRemoveAfterLimit [GOOD] >> TInFlightControllerTest::TestRemoveAll [GOOD] >> TInFlightControllerTest::TestSequentialAddRemove [GOOD] >> TInFlightControllerTest::TestOutOfOrderRemove [GOOD] >> TInFlightControllerTest::TestSmallMaxAllowedSize [GOOD] >> TInFlightControllerTest::TestLargeMaxAllowedSize [GOOD] >> TInFlightControllerTest::TestLayoutUnitCalculation [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=noset >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=foo >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl >> TPQTest::TestCompaction >> KqpQueryService::Explain [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TInFlightControllerTest::TestLayoutUnitCalculation [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=noset >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=empty >> KqpQueryService::TableSink_OltpOrder [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=foo >> TPQTest::TestWritePQCompact >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=empty >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=noset >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMeteringSink::UnusedStorageV1 [GOOD] >> 
TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestSeveralOwners >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=empty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD] Test command err: 2026-02-19T08:33:33.919470Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491671330662873:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:33.919512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:34.283395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:34.332999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:34.333102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:34.381372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:34.423287Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:34.425982Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491671330662752:2081] 1771490013902682 != 1771490013902685 2026-02-19T08:33:34.561329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:34.561985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:34.561993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:34.561999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:34.562084Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:34.928547Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:35.023223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:35.034777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:35.094743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.226582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.382458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.464109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.389849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491688510533827:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.389952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.390309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491688510533837:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.390350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:37.714199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.768075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.802116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.846462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.881148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:37.927083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.009993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.052480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.130540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491692805502012:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.130612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.130958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491692805502017:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.130998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491692805502018:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.131111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:38.134155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:38.144890Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491692805502021:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:38.239478Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491692805502072:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:38.922920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491671330662873:2150];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:38.922979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:40.082412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491701395436991:2551], DatabaseId: /Root, PoolId: another_pool_id, Failed to ... 2-19T08:33:53.189937Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7608491739568700184:2076];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:53.190007Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:55.483319Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491767061437329:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:55.483381Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:55.498058Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:55.612192Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:55.612283Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:55.616509Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:55.652560Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:55.703060Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:55.838341Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:55.838369Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:55.838377Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:55.838465Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-02-19T08:33:56.331095Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:56.337198Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:56.348425Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:56.426196Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:56.540918Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:56.590068Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:56.674575Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.345561Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491784241308327:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.345681Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.346044Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491784241308337:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.346127Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.439805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.477347Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.509013Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.555823Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.587163Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.623998Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.662542Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.715239Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.861932Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491784241309211:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.862020Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.862267Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491784241309216:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.862292Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491784241309217:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.862333Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.867117Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:59.882188Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491784241309220:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:59.953624Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491784241309271:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:34:00.486437Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491767061437329:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:34:00.486546Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=noset >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=foo >> KqpQueryService::TableSink_DisableSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpOrder [GOOD] Test command err: 2026-02-19T08:33:31.511063Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491664071685293:2066];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:31.511106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:31.587640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:33:31.889776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:31.915964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:31.916083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:31.984912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:32.017792Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:32.042117Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491664071685268:2081] 1771490011501536 != 1771490011501539 2026-02-19T08:33:32.069192Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.007462s 2026-02-19T08:33:32.111097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-02-19T08:33:32.238235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:32.238261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:32.238274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:32.238364Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:32.546098Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:32.610025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:32.625776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:34.725533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491676956588052:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.725638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.726682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491676956588062:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.726737Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:34.964243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:35.081169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491681251555504:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.081242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.081520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491681251555509:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.081546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491681251555510:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.081635Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:35.084736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:35.098397Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491681251555513:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:33:35.253153Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491681251555564:2632] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:36.513794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491664071685293:2066];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:36.513850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:38.792360Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7608491696539718692:2082];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:38.792490Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:38.832666Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:38.895143Z node 2 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:38.927947Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:38.928038Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:38.934396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:39.055463Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:39.055485Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:39.055492Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:39.055570Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:39.084213Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:39.494779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:39.794355Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:42.195078Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491713719588706:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.195180Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.195487Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7608491713719588716:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.195554Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:42.212532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is ... 493522765:2388].{
: Error: Conflict with existing key., code: 2012 } 2026-02-19T08:34:01.635776Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:7608491792493522757:2388], SessionActorId: [3:7608491732363977463:2388], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7608491732363977463:2388]. 2026-02-19T08:34:01.635994Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, ActorId: [3:7608491732363977463:2388], ActorState: ExecuteState, LegacyTraceId: 01khtghqpt8qjqd3pghzjanq73, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7608491792493522758:2388] from: [3:7608491792493522757:2388] trace_id# 2026-02-19T08:34:01.636092Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:7608491792493522758:2388] TxId: 281474976715851. Ctx: { TraceId: 01khtghqpt8qjqd3pghzjanq73, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-02-19T08:34:01.636344Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, ActorId: [3:7608491732363977463:2388], ActorState: ExecuteState, LegacyTraceId: 01khtghqpt8qjqd3pghzjanq73, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/DataShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-02-19T08:34:01.790452Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=98; 2026-02-19T08:34:01.790739Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:996: SelfId: [3:7608491792493522802:2388], Table: `/Root/DataShard` ([72057594046644480:38:1]), SessionActorId: [3:7608491732363977463:2388]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7608491792493522802:2388].{
: Error: Conflict with existing key., code: 2012 } 2026-02-19T08:34:01.790793Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:7608491792493522794:2388], SessionActorId: [3:7608491732363977463:2388], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7608491732363977463:2388]. 2026-02-19T08:34:01.791015Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, ActorId: [3:7608491732363977463:2388], ActorState: ExecuteState, LegacyTraceId: 01khtghqvr8d6zbh3fkhy6s2d0, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7608491792493522795:2388] from: [3:7608491792493522794:2388] trace_id# 2026-02-19T08:34:01.791095Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:7608491792493522795:2388] TxId: 281474976715853. Ctx: { TraceId: 01khtghqvr8d6zbh3fkhy6s2d0, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-02-19T08:34:01.791336Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, ActorId: [3:7608491732363977463:2388], ActorState: ExecuteState, LegacyTraceId: 01khtghqvr8d6zbh3fkhy6s2d0, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/DataShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-02-19T08:34:01.955973Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=99; 2026-02-19T08:34:01.956293Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:996: SelfId: [3:7608491792493522834:2339], Table: `/Root/DataShard` ([72057594046644480:38:1]), SessionActorId: [3:7608491732363977273:2339]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7608491792493522834:2339].{
: Error: Conflict with existing key., code: 2012 } 2026-02-19T08:34:01.956360Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:7608491792493522827:2339], SessionActorId: [3:7608491732363977273:2339], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7608491732363977273:2339]. 2026-02-19T08:34:01.956578Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=ZDViOTMxMC0yN2M3OTYxNS0yMjg3ZTVjNy1kNTgxNzg4ZA==, ActorId: [3:7608491732363977273:2339], ActorState: ExecuteState, LegacyTraceId: 01khtghr0kem1xjsfwsspknwby, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7608491792493522828:2339] from: [3:7608491792493522827:2339] trace_id# 2026-02-19T08:34:01.956659Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:7608491792493522828:2339] TxId: 281474976715855. Ctx: { TraceId: 01khtghr0kem1xjsfwsspknwby, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDViOTMxMC0yN2M3OTYxNS0yMjg3ZTVjNy1kNTgxNzg4ZA==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-02-19T08:34:01.956902Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=ZDViOTMxMC0yN2M3OTYxNS0yMjg3ZTVjNy1kNTgxNzg4ZA==, ActorId: [3:7608491732363977273:2339], ActorState: ExecuteState, LegacyTraceId: 01khtghr0kem1xjsfwsspknwby, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/DataShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-02-19T08:34:02.123527Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=100; 2026-02-19T08:34:02.123848Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:996: SelfId: [3:7608491796788490167:2339], Table: `/Root/DataShard` ([72057594046644480:38:1]), SessionActorId: [3:7608491732363977273:2339]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7608491796788490167:2339].{
: Error: Conflict with existing key., code: 2012 } 2026-02-19T08:34:02.123914Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:7608491796788490159:2339], SessionActorId: [3:7608491732363977273:2339], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7608491732363977273:2339]. 2026-02-19T08:34:02.124229Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=ZDViOTMxMC0yN2M3OTYxNS0yMjg3ZTVjNy1kNTgxNzg4ZA==, ActorId: [3:7608491732363977273:2339], ActorState: ExecuteState, LegacyTraceId: 01khtghr5y2rd8ebm8vv7vsaja, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7608491796788490160:2339] from: [3:7608491796788490159:2339] trace_id# 2026-02-19T08:34:02.124317Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:7608491796788490160:2339] TxId: 281474976715857. Ctx: { TraceId: 01khtghr5y2rd8ebm8vv7vsaja, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDViOTMxMC0yN2M3OTYxNS0yMjg3ZTVjNy1kNTgxNzg4ZA==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-02-19T08:34:02.124565Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=ZDViOTMxMC0yN2M3OTYxNS0yMjg3ZTVjNy1kNTgxNzg4ZA==, ActorId: [3:7608491732363977273:2339], ActorState: ExecuteState, LegacyTraceId: 01khtghr5y2rd8ebm8vv7vsaja, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/DataShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-02-19T08:34:02.291100Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=101; 2026-02-19T08:34:02.291416Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:996: SelfId: [3:7608491796788490200:2388], Table: `/Root/DataShard` ([72057594046644480:38:1]), SessionActorId: [3:7608491732363977463:2388]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7608491796788490200:2388].{
: Error: Conflict with existing key., code: 2012 } 2026-02-19T08:34:02.291497Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [3:7608491796788490192:2388], SessionActorId: [3:7608491732363977463:2388], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7608491732363977463:2388]. 2026-02-19T08:34:02.291726Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, ActorId: [3:7608491732363977463:2388], ActorState: ExecuteState, LegacyTraceId: 01khtghrb24r7j05x850sqss7h, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7608491796788490193:2388] from: [3:7608491796788490192:2388] trace_id# 2026-02-19T08:34:02.291817Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1280} ActorId: [3:7608491796788490193:2388] TxId: 281474976715859. Ctx: { TraceId: 01khtghrb24r7j05x850sqss7h, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-02-19T08:34:02.292093Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3504} SessionId: ydb://session/3?node_id=3&id=ZDJhNzE3MzUtOTEwZjAwNGUtYjU4YWNhY2MtOTE2NWExNDE=, ActorId: [3:7608491732363977463:2388], ActorState: ExecuteState, LegacyTraceId: 01khtghrb24r7j05x850sqss7h, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/DataShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=foo >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=foo__consumerMPI=foo >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=empty >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestPartitionedBigTest >> IncrementalRestoreScan::Empty [GOOD] >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=foo >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=empty__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=foo__consumerMPI=foo >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=foo__consumerMPI=foo [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=foo__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=noset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_DisableSink [GOOD] Test command err: 2026-02-19T08:33:28.616958Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491650673091750:2261];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:28.617006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:29.039933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:29.060165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:29.060282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:29.170683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:29.214367Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:29.218259Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608491650673091518:2081] 1771490008592269 != 1771490008592272 2026-02-19T08:33:29.485080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:29.542249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:29.542269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:29.542274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:29.542348Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:29.585839Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:30.148325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:32.445092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491667852961611:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.445254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.445784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491667852961621:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.445862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:32.720560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:792) 2026-02-19T08:33:32.865304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:32.865594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:32.866001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19T08:33:32.866027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:32.866047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-02-19T08:33:32.866151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:32.866193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-02-19T08:33:32.866265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:32.866316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-02-19T08:33:32.866394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:32.866441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-02-19T08:33:32.866515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:32.866584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-02-19T08:33:32.866601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:32.866680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-02-19T08:33:32.866724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:32.866787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-02-19T08:33:32.866841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:32.866867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-02-19T08:33:32.866945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:32.866961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-02-19T08:33:32.867109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-02-19T08:33:32.867109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:32.867212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7608491667852961765:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:32.867402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-02-19T08:33:32.869848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7608491667852961764:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-02-19T08:33:32.914356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7608491667852961768:2340];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-02-19 ... =resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.628621Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.628678Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.628700Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.636961Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.637027Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.637047Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.639623Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.639703Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.639728Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.646641Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.646705Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.646726Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.650671Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.650751Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.650778Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.656407Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.656498Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.656526Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.661840Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.661963Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.662012Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.666346Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.666410Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.666431Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.672850Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.672943Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.672970Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.675358Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.675441Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.675464Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.684066Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.684136Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.684179Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:61;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-02-19T08:34:02.725472Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491797197041672:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.725563Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.725764Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491797197041677:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.725780Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7608491797197041678:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.725820Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.729016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:34:02.740241Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7608491797197041681:2425], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-02-19T08:34:02.804475Z node 3 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [3:7608491797197041732:2903] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:34:02.845809Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:627: Compilation failed, self: [3:7608491797197041748:2429], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:30: Error: Data manipulation queries with column-oriented tables are disabled. 2026-02-19T08:34:02.847273Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3007} SessionId: ydb://session/3?node_id=3&id=YTRjZjI1YWQtNTM0YTZmODMtNGFmN2FkYjktMTAwOGFmNjU=, ActorId: [3:7608491797197041670:2419], ActorState: ExecuteState, LegacyTraceId: 01khtghrv19c6e9ywt0b681gyy, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 30 } message: "Data manipulation queries with column-oriented tables are disabled." end_position { row: 2 column: 30 } severity: 1 } } tx_id# trace_id# >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__federation__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=empty__consumerMPI=noset >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:34:03.482981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:34:03.602457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:34:03.624162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:34:03.624256Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:34:03.624782Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:34:04.115647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:34:04.115768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:34:04.204697Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771490040506947 != 1771490040506951 2026-02-19T08:34:04.217582Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:34:04.271726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:34:04.407440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:34:04.729187Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:182: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:839:2732] Exhausted 2026-02-19T08:34:04.729330Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:131: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:839:2732] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2026-02-19T08:34:04.729392Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:195: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:839:2732] Finish Done ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=foo__consumerMPI=foo [GOOD] Test command err: Run PartitionLevelMetrics(EnableMetricsLevel=1, FirstClassCitizen=1, PartitionMetricsLevel=2, PartitionMonitoringProjectId=, ConsumersMonitoringProjectId=) 2026-02-19T08:34:00.343690Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:34:00.415520Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:34:00.415607Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:34:00.415666Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:00.415732Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:34:00.432900Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:00.454100Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 1 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 2 MonitoringProjectId: "" } MetricsLevel: 2 MonitoringProjectId: "" 2026-02-19T08:34:00.455267Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:212:2167] 2026-02-19T08:34:00.456218Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:212:2167] 2026-02-19T08:34:00.457671Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:213:2167] 2026-02-19T08:34:00.458455Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:213:2167] 2026-02-19T08:34:00.463989Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:00.464404Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e069a797-c4392894-8d6e0a78-6846a0f8_0 generated for partition 0 topic 'topic' owner default 2026-02-19T08:34:00.472694Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:00.473102Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|195a43b-395f62c7-c112dcde-ca8b58ef_1 generated for partition 0 topic 'topic' owner default 2026-02-19T08:34:00.479405Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:00.479700Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8cfca17c-795c2fbd-22761510-67b78c07_2 generated for partition 0 topic 'topic' owner default 2026-02-19T08:34:00.485834Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:00.486242Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a4cfc9ee-55a995b1-aa2252df-7d904347_0 generated for partition 1 topic 'topic' owner default 2026-02-19T08:34:00.493873Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:00.494305Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ad21708a-5e5ff7b7-fc86e684-237a7299_1 generated for partition 1 topic 'topic' owner default Got start offset = 0 XXXXX before enable: Sensor: topic.partition.committed_lag_messages null Sensor: topic.partition.read.bytes null Sensor: topic.partition.read.messages null 2026-02-19T08:34:00.510881Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:00.517320Z node 1 :PERSQUEUE INFO: 
pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 2 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 3 MonitoringProjectId: "" } MetricsLevel: 2 MonitoringProjectId: "" XXXXX after enable: Sensor: topic.partition.committed_lag_messages null Sensor: topic.partition.read.bytes null Sensor: topic.partition.read.messages null Set pipe for create session: [1:298:2291] 2026-02-19T08:34:00.519412Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [1:297:2290] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 298 RawX2: 4294969587 } Cookie: 123 } via pipe: [1:204:2217] Set pipe for create session: [1:310:2300] 2026-02-19T08:34:00.532955Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [1:309:2299] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 310 RawX2: 4294969596 } Cookie: 123 } via pipe: [1:204:2217] XXXXX after read: Sensor: topic.partition.committed_lag_messages null Sensor: topic.partition.read.bytes null Sensor: topic.partition.read.messages null 2026-02-19T08:34:00.541405Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:00.547006Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 3 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 2 MonitoringProjectId: "" } MetricsLevel: 2 MonitoringProjectId: "" XXXXX after disable: Sensor: topic.partition.committed_lag_messages null Sensor: topic.partition.read.bytes null Sensor: topic.partition.read.messages null Run PartitionLevelMetrics(EnableMetricsLevel=1, FirstClassCitizen=1, 
PartitionMetricsLevel=2, PartitionMonitoringProjectId=, ConsumersMonitoringProjectId=foo) 2026-02-19T08:34:01.024815Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:34:01.066016Z node 2 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:34:01.066070Z node 2 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:34:01.066107Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:01.066148Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:34:01.083982Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:01.084759Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 4 actor [2:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 } Consumers { Name: "user1" Generation: 4 Important: false MetricsLevel: 2 MonitoringProjectId: "foo" } MetricsLevel: 2 MonitoringProjectId: "" 2026-02-19T08:34:01.085344Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:215:2167] 2026-02-19T08:34:01.085944Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [2:215:2167] 2026-02-19T08:34:01.086783Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:216:2167] 2026-02-19T08:34:01.087279Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [2:216:2167] 2026-02-19T08:34:01.091291Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:01.091599Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4aa0e81-3dea9a11-5f1b8624-3ca7e704_0 generated for partition 0 topic 'topic' owner default 2026-02-19T08:34:01.09 ... 
rtition.write.lag_milliseconds: 000 name=topic.partition.write.messages: 20 consumer=user: name=topic.partition.committed_lag_messages: 20 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 0 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 19 name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 0 name=topic.partition.write.lag_milliseconds: 000 consumer=user1: name=topic.partition.committed_lag_messages: 20 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 0 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 19 name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 0 name=topic.partition.write.lag_milliseconds: 000 Sensor: topic.partition.committed_lag_messages {"foo":[30,20]} Sensor: topic.partition.read.bytes {"foo":[0,0]} Sensor: topic.partition.read.messages {"foo":[0,0]} Set pipe for create session: [9:299:2291] 2026-02-19T08:34:05.291289Z node 9 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [9:298:2290] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 299 RawX2: 38654707955 } Cookie: 123 } via pipe: [9:207:2219] Set pipe for create session: [9:311:2300] 2026-02-19T08:34:05.300875Z node 9 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [9:310:2299] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 311 RawX2: 38654707964 } Cookie: 123 } via pipe: [9:207:2219] XXXXX after read: host=: monitoring_project_id=foo: database=/Root/PQ: cloud_id=somecloud: folder_id=somefolder: database_id=PQ: topic=topic: partition_id=0: name=topic.partition.producers_count: 3 name=topic.partition.write.bytes: 540 name=topic.partition.write.idle_milliseconds: 000 name=topic.partition.write.lag_milliseconds: 000 name=topic.partition.write.messages: 30 consumer=user: name=topic.partition.committed_lag_messages: 30 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 0 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 29 name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 0 name=topic.partition.write.lag_milliseconds: 000 consumer=user1: name=topic.partition.committed_lag_messages: 30 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 106 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 29 name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 2 name=topic.partition.write.lag_milliseconds: 000 partition_id=1: name=topic.partition.producers_count: 2 name=topic.partition.write.bytes: 360 name=topic.partition.write.idle_milliseconds: 000 name=topic.partition.write.lag_milliseconds: 000 name=topic.partition.write.messages: 20 consumer=user: name=topic.partition.committed_lag_messages: 20 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 0 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 19 
name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 0 name=topic.partition.write.lag_milliseconds: 000 consumer=user1: name=topic.partition.committed_lag_messages: 20 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 693 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 19 name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 17 name=topic.partition.write.lag_milliseconds: 000 Sensor: topic.partition.committed_lag_messages {"foo":[30,20]} Sensor: topic.partition.read.bytes {"foo":[106,693]} Sensor: topic.partition.read.messages {"foo":[2,17]} 2026-02-19T08:34:05.337357Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:05.343610Z node 9 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 27 actor [9:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 27 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 25 } Consumers { Name: "user1" Generation: 25 Important: false MetricsLevel: 2 MonitoringProjectId: "foo" } MetricsLevel: 3 MonitoringProjectId: "foo" XXXXX after disable: host=: monitoring_project_id=foo: database=/Root/PQ: cloud_id=somecloud: folder_id=somefolder: database_id=PQ: topic=topic: partition_id=0: name=topic.partition.producers_count: 3 name=topic.partition.write.bytes: 540 name=topic.partition.write.idle_milliseconds: 000 name=topic.partition.write.lag_milliseconds: 000 name=topic.partition.write.messages: 30 consumer=user: name=topic.partition.committed_lag_messages: 30 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 0 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 29 name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 0 name=topic.partition.write.lag_milliseconds: 000 consumer=user1: name=topic.partition.committed_lag_messages: 30 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 106 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 28 name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 2 name=topic.partition.write.lag_milliseconds: 000 partition_id=1: name=topic.partition.producers_count: 2 name=topic.partition.write.bytes: 360 name=topic.partition.write.idle_milliseconds: 000 name=topic.partition.write.lag_milliseconds: 000 name=topic.partition.write.messages: 20 consumer=user: name=topic.partition.committed_lag_messages: 20 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 0 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 19 name=topic.partition.read.lag_milliseconds: 000 
name=topic.partition.read.messages: 0 name=topic.partition.write.lag_milliseconds: 000 consumer=user1: name=topic.partition.committed_lag_messages: 20 name=topic.partition.committed_read_lag_milliseconds: 000 name=topic.partition.read.bytes: 693 name=topic.partition.read.idle_milliseconds: 000 name=topic.partition.read.lag_messages: 3 name=topic.partition.read.lag_milliseconds: 000 name=topic.partition.read.messages: 17 name=topic.partition.write.lag_milliseconds: 000 Sensor: topic.partition.committed_lag_messages {"foo":[30,20]} Sensor: topic.partition.read.bytes {"foo":[106,693]} Sensor: topic.partition.read.messages {"foo":[2,17]} >> KqpQueryService::ExecuteQueryMultiResult [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__federation__partitionMetricsLevel=object__partitionMPI=bar__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=empty >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> KqpNamedExpressions::NamedExpressionRandomSelect-UseSink [GOOD] >> TPartitionTests::Batching >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=empty__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=empty__consumerMPI=empty >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskFaultyGuard >> TPartitionTests::SetOffset >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=empty [GOOD] >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] >> TPartitionTests::Batching [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:34:03.593769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:34:03.713176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:34:03.737693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:34:03.737806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:34:03.738443Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:34:04.112945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:34:04.113086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:34:04.223059Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771490040507142 != 1771490040507146 2026-02-19T08:34:04.242015Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:34:04.291202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:34:04.390417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:34:04.858191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-02-19T08:34:04.858453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.860797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:34:04.860868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:34:04.861767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:34:04.861855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:34:04.862706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:34:04.862855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:34:04.863062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.863119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:34:04.863162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:34:04.863207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:34:04.863691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.863763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:34:04.863807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:34:04.864249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.864283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.864340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:34:04.864387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:34:04.867720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:34:04.868274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:34:04.868434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:34:04.869140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-02-19T08:34:04.869186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2026-02-19T08:34:04.869225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2026-02-19T08:34:04.880287Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:34:04.989766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:34:04.989922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:34:04.989981Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:34:04.990339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:34:04.990415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:34:04.990590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-02-19T08:34:04.990662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-02-19T08:34:04.991999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:34:04.992097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:34:04.992308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:34:04.992350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:566:2499], at schemeshard: 72057594046644480, txId: 1, path id: 1 2026-02-19T08:34:04.992743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.992796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046644480] TDone opId# 1:0 ProgressState 2026-02-19T08:34:04.992914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:34:04.992951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:34:04.992990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:34:04.993022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:34:04.993061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:34:04.993105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:34:04.993143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:34:04.993173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:34:04.993269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-02-19T08:34:04.993335Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-02-19T08:34:04.993376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Pu ... ountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 
2026-02-19T08:34:05.692595Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:69:2116] Handle TEvNavigate describe path /Root/IncrBackupTable 2026-02-19T08:34:05.692712Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1030:2870] HANDLE EvNavigateScheme /Root/IncrBackupTable 2026-02-19T08:34:05.693223Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1030:2870] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:34:05.693305Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1030:2870] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2026-02-19T08:34:05.694648Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1030:2870] Handle TEvDescribeSchemeResult Forward to# [1:835:2729] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 
UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:34:05.695624Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1032:2872] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:34:05.695884Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1032:2872] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 
ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:34:05.696183Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1032:2872] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:34:05.696368Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1032:2872] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-02-19T08:34:03.360811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-02-19T08:34:03.519311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-02-19T08:34:03.555988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-02-19T08:34:03.556080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-02-19T08:34:03.557748Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-02-19T08:34:04.112942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:34:04.113079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:34:04.210872Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1771490040507108 != 1771490040507112 2026-02-19T08:34:04.223659Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:34:04.276406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:34:04.392074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-02-19T08:34:04.895237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-02-19T08:34:04.895497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.895723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-02-19T08:34:04.895781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5608: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-02-19T08:34:04.896029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-02-19T08:34:04.896104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:34:04.896919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-02-19T08:34:04.897067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-02-19T08:34:04.897268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.897329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:317: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-02-19T08:34:04.897376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:371: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-02-19T08:34:04.897419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 CreateParts -> ConfigureParts 2026-02-19T08:34:04.898123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.898182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-02-19T08:34:04.898238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 ConfigureParts -> Propose 2026-02-19T08:34:04.898607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.898637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:04.898679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:34:04.898718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1729: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-02-19T08:34:04.902054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1798: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-02-19T08:34:04.902607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-02-19T08:34:04.902745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1830: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-02-19T08:34:04.903397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-02-19T08:34:04.903434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2026-02-19T08:34:04.903462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2026-02-19T08:34:04.914515Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:34:05.021262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-02-19T08:34:05.021404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-02-19T08:34:05.021446Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:34:05.021686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2707: Change state for txid 1:0 Propose -> Done 2026-02-19T08:34:05.021756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-02-19T08:34:05.021931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:605: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-02-19T08:34:05.021990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-02-19T08:34:05.022867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-02-19T08:34:05.022947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-02-19T08:34:05.023152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-02-19T08:34:05.023214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:566:2499], at schemeshard: 72057594046644480, txId: 1, path id: 1 2026-02-19T08:34:05.023597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-02-19T08:34:05.023646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:499: [72057594046644480] TDone opId# 1:0 ProgressState 2026-02-19T08:34:05.023785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:34:05.023826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:34:05.023866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-02-19T08:34:05.023898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:34:05.023936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1685: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-02-19T08:34:05.023978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1720: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-02-19T08:34:05.024014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-02-19T08:34:05.024043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 1:0 2026-02-19T08:34:05.024142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-02-19T08:34:05.024211Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-02-19T08:34:05.024252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Pu ... ration id: 281474976715658:0 2026-02-19T08:34:05.666688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5632: RemoveTx for txid 281474976715658:0 2026-02-19T08:34:05.666822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:616: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-02-19T08:34:05.667545Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:69:2116] Handle TEvNavigate describe path /Root/IncrBackupTable 2026-02-19T08:34:05.667655Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1018:2864] HANDLE EvNavigateScheme /Root/IncrBackupTable 2026-02-19T08:34:05.669539Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1018:2864] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-02-19T08:34:05.669656Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1018:2864] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2026-02-19T08:34:05.671042Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1018:2864] Handle TEvDescribeSchemeResult Forward to# [1:835:2729] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-02-19T08:34:05.673529Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037889, clientId# [1:1028:2868], serverId# [1:1029:2869], sessionId# [0:0:0] 2026-02-19T08:34:05.678902Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1030:2870] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ 
Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:34:05.679262Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1030:2870] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-02-19T08:34:05.681072Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1030:2870] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-02-19T08:34:05.682181Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:139: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1030:2870] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] BodySize: 18 }] } 2026-02-19T08:34:05.682372Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:144: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1030:2870] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-02-19T08:34:05.682578Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:69:2116] Handle TEvGetProxyServicesRequest 2026-02-19T08:34:05.682675Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:40: [TableChangeSenderShard][0:0][72075186224037888][1:1034:2870] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-02-19T08:34:05.683150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3746: Server connected at leader tablet# 72075186224037888, 
clientId# [1:1035:2874], serverId# [1:1036:2875], sessionId# [0:0:0] 2026-02-19T08:34:05.726354Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][0:0][72075186224037888][1:1034:2870] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2026-02-19T08:34:05.726527Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1030:2870] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:34:05.726653Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][0:0][72075186224037888][1:1034:2870] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-02-19T08:34:05.726709Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1030:2870] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-02-19T08:34:05.726839Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1030:2870] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=empty__consumerMPI=empty [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=empty__consumerMPI=foo >> TPartitionTests::CommitOffsetRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=empty [GOOD] Test command err: Run PartitionLevelMetrics(EnableMetricsLevel=0, FirstClassCitizen=0, PartitionMetricsLevel=2, PartitionMonitoringProjectId=(NULL), ConsumersMonitoringProjectId=(NULL)) 2026-02-19T08:34:02.279548Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 1 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:34:02.341175Z node 1 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:34:02.341253Z node 1 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:34:02.341311Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:02.341373Z node 1 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:34:02.358279Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:02.376390Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 1 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 2 } MetricsLevel: 2 2026-02-19T08:34:02.377472Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:212:2167] 2026-02-19T08:34:02.380099Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:212:2167] 2026-02-19T08:34:02.383221Z node 1 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:213:2167] 2026-02-19T08:34:02.384948Z node 1 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:213:2167] 2026-02-19T08:34:02.396387Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:02.396830Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ea6986e-38b86cf3-f771f743-d3dd8e40_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:34:02.409324Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:02.409753Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4266e456-7111e49f-82121ce4-17b4314d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:34:02.416125Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:02.416484Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b99ae270-49b1a4d2-e66aa061-c582a9f4_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:34:02.422143Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:02.422514Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bc2aedcc-73c3d642-90f401e6-48c38a98_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:34:02.429901Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:02.430350Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bde5dfb7-960bc6eb-5060c9e1-2133ef57_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 XXXXX before enable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null 2026-02-19T08:34:02.440161Z node 1 :PERSQUEUE 
NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:02.445973Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 2 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 3 } MetricsLevel: 2 XXXXX after enable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Set pipe for create session: [1:298:2291] 2026-02-19T08:34:02.447908Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [1:297:2290] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 298 RawX2: 4294969587 } Cookie: 123 } via pipe: [1:204:2217] Set pipe for create session: [1:310:2300] 2026-02-19T08:34:02.463684Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [1:309:2299] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 310 RawX2: 4294969596 } Cookie: 123 } via pipe: [1:204:2217] XXXXX after read: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null 2026-02-19T08:34:02.475285Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:02.481082Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 3 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: false MetricsLevel: 2 } MetricsLevel: 2 XXXXX after disable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Run PartitionLevelMetrics(EnableMetricsLevel=0, FirstClassCitizen=0, PartitionMetricsLevel=2, PartitionMonitoringProjectId=(NULL), ConsumersMonitoringProjectId=) 2026-02-19T08:34:02.924345Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: 
No candidates for SelectForward, node: 2 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:34:02.965047Z node 2 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:34:02.965099Z node 2 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:34:02.965136Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:02.965177Z node 2 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:34:02.982118Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:02.982857Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 4 actor [2:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 } Consumers { Name: "user1" Generation: 4 Important: false MetricsLevel: 2 MonitoringProjectId: "" } MetricsLevel: 2 2026-02-19T08:34:02.983405Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:215:2167] 2026-02-19T08:34:02.986059Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:215:2167] 2026-02-19T08:34:02.988677Z node 2 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:216:2167] 2026-02-19T08:34:02.990482Z node 2 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:216:2167] 2026-02-19T08:34:02.996862Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:02.997126Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|cffeff6f-8d76aacf-abded47-8bd8ee24_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-02-19T08:34:03.003145Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:03.003435Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|16bd6cbd-98b22079-b705395c-b6047e97_1 generate ... 
PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 23 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 22 } Consumers { Name: "user1" Generation: 22 Important: false MetricsLevel: 3 } MetricsLevel: 2 MonitoringProjectId: "bar" XXXXX after enable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Set pipe for create session: [8:302:2295] 2026-02-19T08:34:06.274372Z node 8 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [8:301:2294] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 302 RawX2: 34359740663 } Cookie: 123 } via pipe: [8:206:2219] Set pipe for create session: [8:314:2304] 2026-02-19T08:34:06.278612Z node 8 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [8:313:2303] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 314 RawX2: 34359740672 } Cookie: 123 } via pipe: [8:206:2219] XXXXX after read: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null 2026-02-19T08:34:06.285400Z node 8 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:06.290206Z node 8 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 24 actor [8:206:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 24 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 22 } Consumers { Name: "user1" Generation: 22 Important: false MetricsLevel: 2 } MetricsLevel: 2 MonitoringProjectId: "bar" XXXXX after disable: Sensor: MessageLagByCommittedPerPartition null Sensor: BytesReadPerPartition null Sensor: MessagesReadPerPartition null Run PartitionLevelMetrics(EnableMetricsLevel=0, FirstClassCitizen=1, PartitionMetricsLevel=3, PartitionMonitoringProjectId=bar, ConsumersMonitoringProjectId=) 2026-02-19T08:34:06.646650Z node 9 :TABLET_RESOLVER INFO: tablet_resolver.cpp:746: No candidates for SelectForward, node: 9 selfDC: leaderDC: resolveFlags: 1:2:0:(undefined) local: 0 localDc: 0 other: 0 disallowed: 0 tabletId: 72057594037936129 followers: 0 allowLeader: 0 allowFollowers: 0 2026-02-19T08:34:06.699088Z 
node 9 :PQ_TX INFO: pq_impl.cpp:565: [PQ: 72057594037927937] doesn't have tx info 2026-02-19T08:34:06.699143Z node 9 :PQ_TX INFO: pq_impl.cpp:577: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-02-19T08:34:06.699182Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:06.699226Z node 9 :PERSQUEUE INFO: pq_impl.cpp:614: [PQ: 72057594037927937] doesn't have tx writes info 2026-02-19T08:34:06.723771Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:06.724546Z node 9 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 25 actor [9:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 25 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 25 } Consumers { Name: "user1" Generation: 25 Important: false MetricsLevel: 2 MonitoringProjectId: "" } MetricsLevel: 3 MonitoringProjectId: "bar" 2026-02-19T08:34:06.725285Z node 9 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [9:215:2167] 2026-02-19T08:34:06.726773Z node 9 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [9:215:2167] 2026-02-19T08:34:06.727894Z node 9 :PERSQUEUE INFO: partition_init.cpp:1175: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [9:216:2167] 2026-02-19T08:34:06.728601Z node 9 :PERSQUEUE INFO: partition.cpp:713: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [9:216:2167] 2026-02-19T08:34:06.735838Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:06.736194Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3d857164-21d42a05-20309cce-41649a11_0 generated for partition 0 topic 'topic' owner default 2026-02-19T08:34:06.744071Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:06.744411Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a497f985-e829bd9c-9c966dcc-2ae95a2c_1 generated for partition 0 topic 'topic' owner default 2026-02-19T08:34:06.761134Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:06.761493Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2c98fe4b-43440c5a-a5966df2-374da4c3_2 generated for partition 0 topic 'topic' owner default 2026-02-19T08:34:06.767268Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 1 
messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:06.767606Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5b7583f-f9c8b85-51fea55c-d6b935d2_0 generated for partition 1 topic 'topic' owner default 2026-02-19T08:34:06.774166Z node 9 :PERSQUEUE ERROR: pq_impl.cpp:204: Answer error topic: 'topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-02-19T08:34:06.774490Z node 9 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b78bcb1b-733b753c-2199ea05-b3efedbd_1 generated for partition 1 topic 'topic' owner default Got start offset = 0 XXXXX before enable: Sensor: topic.partition.committed_lag_messages null Sensor: topic.partition.read.bytes null Sensor: topic.partition.read.messages null 2026-02-19T08:34:06.787273Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:06.796382Z node 9 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 26 actor [9:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 26 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 25 } Consumers { Name: "user1" Generation: 25 Important: false MetricsLevel: 3 MonitoringProjectId: "" } MetricsLevel: 3 MonitoringProjectId: "bar" XXXXX after enable: Sensor: topic.partition.committed_lag_messages null Sensor: topic.partition.read.bytes null Sensor: topic.partition.read.messages null Set pipe for create session: [9:303:2295] 2026-02-19T08:34:06.798097Z node 9 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [9:302:2294] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 2 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 303 RawX2: 38654707959 } Cookie: 123 } via pipe: [9:207:2219] Set pipe for create session: [9:315:2304] 2026-02-19T08:34:06.802541Z node 9 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][] pipe [9:314:2303] connected; active server actors: 1 Send read request: PartitionRequest { Partition: 1 CmdRead { ClientId: "user1" SessionId: "session1" Offset: 0 Count: 17 Bytes: 16777216 PartitionSessionId: 1 } PipeClient { RawX1: 315 RawX2: 38654707968 } Cookie: 123 } via pipe: [9:207:2219] XXXXX after read: Sensor: topic.partition.committed_lag_messages null Sensor: topic.partition.read.bytes null Sensor: topic.partition.read.messages null 2026-02-19T08:34:06.810075Z node 9 :PERSQUEUE NOTICE: pq_impl.cpp:928: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-02-19T08:34:06.813312Z node 9 :PERSQUEUE INFO: pq_impl.cpp:1319: [PQ: 72057594037927937] Config applied version 27 actor [9:207:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 27 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 25 } Consumers { Name: "user1" Generation: 25 Important: false MetricsLevel: 2 MonitoringProjectId: "" } MetricsLevel: 3 MonitoringProjectId: "bar" XXXXX after disable: Sensor: topic.partition.committed_lag_messages null Sensor: topic.partition.read.bytes null Sensor: topic.partition.read.messages null >> TPartitionTests::SetOffset [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiResult [GOOD] Test command err: 2026-02-19T08:33:37.241888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608491688585449902:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:37.244083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:37.425415Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:37.451775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:37.451889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:37.503556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:37.506791Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:37.589803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:37.608683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:37.608705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:37.608710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:37.608799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:37.988606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:38.006208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:38.067254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.216238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.325023Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:38.389999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:38.455733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.233499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491701470353602:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.233573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.234515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491701470353612:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.234573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.583278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.610185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.638857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.708007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.737740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.770865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.809147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.856349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:40.966571Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491701470354495:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.966696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.967323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491701470354500:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.967378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608491701470354501:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.967484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:40.971830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:40.985929Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608491701470354504:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:33:41.087749Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608491705765321851:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:33:42.234765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608491688585449902:2089];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:42.234835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:43.024597Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4885: SelfId: [1:7608491714355256789:2536], SessionActorId: [1:7608491710060289443:2536], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 . sessionActorId=[1:7608491710060289443:2536]. 2026-02-19T08:33:43.024839Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2637} SessionId ... 2515]; 2026-02-19T08:33:56.056808Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:58.751793Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7608491780488596205:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:58.751863Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:58.766514Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:58.844017Z node 4 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:58.845324Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7608491780488596170:2081] 1771490038750825 != 1771490038750828 2026-02-19T08:33:58.880829Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:58.880910Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:58.887086Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:58.940438Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:58.940468Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:58.940476Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:58.940565Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:58.992072Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:59.440162Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:59.446819Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-02-19T08:33:59.456252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.526854Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.728133Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.772511Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:59.797722Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:02.602334Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491797668467223:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.602416Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.602790Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491797668467232:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.602834Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:02.696397Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:02.726681Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:02.772846Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:02.847900Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:02.884160Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:02.925089Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:02.966938Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:03.024705Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:34:03.108403Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491801963435401:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:03.108506Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:03.108533Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491801963435406:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:03.108716Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7608491801963435408:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:03.108783Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:34:03.111756Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:34:03.124904Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7608491801963435409:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-02-19T08:34:03.218647Z node 4 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [4:7608491801963435461:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:34:03.752042Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7608491780488596205:2074];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:34:03.752129Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=empty__consumerMPI=foo [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=noset >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TPartitionTests::CommitOffsetRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNamedExpressions::NamedExpressionRandomSelect-UseSink [GOOD] Test command err: 2026-02-19T08:29:36.917005Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7608490656946224783:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:36.917580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:29:37.099793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:29:37.158543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:29:37.158644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:29:37.162299Z node 1 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:29:37.163632Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7608490656946224737:2081] 1771489776915173 != 1771489776915176 2026-02-19T08:29:37.172307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:29:37.245455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:29:37.245473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2026-02-19T08:29:37.245478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:29:37.245558Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:29:37.308813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:29:37.552983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:29:37.607538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.690006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.808613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.864909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:37.985585Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:29:39.720120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490669831128495:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.720248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.720542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490669831128505:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.720594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:39.972817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.003180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.031711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.060822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.089827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.148333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.176027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.214104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:29:40.279392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674126096672:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.279468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.279569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674126096677:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.279705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7608490674126096679:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.279746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:29:40.283225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:29:40.293210Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7608490674126096681:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:29:40.350744Z node 1 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [1:7608490674126096732:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:29:41.917171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7608490656946224783:2077];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:29:41.917230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [["1b85c87e-506c-4ac1-aaa9-31825e9b311d"];["af85a529-749a-4902-ae9d-e059394a33db"]] [[["384954bd-3e13-40e5-b9b4-f8db273d87c3"]];[["bda25432-6c2c-427e-bbfe-b1fb6d386ae2"]]] [[["384954bd-3e13-40e5-b9b4-f8db273d87c3"]];[["bda25432-6c2c-427e-bbfe-b1fb6d386ae2"]]] 2026-02-19T08:29:43.357748Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self ... 53.555814Z node 25 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7608491760618634493:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:53.566206Z node 25 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-02-19T08:33:53.633803Z node 25 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-02-19T08:33:53.749201Z node 25 :IMPORT WARN: schemeshard_import.cpp:466: Table profiles were not loaded 2026-02-19T08:33:53.766546Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-02-19T08:33:53.766664Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-02-19T08:33:53.812567Z node 25 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-02-19T08:33:53.889474Z node 25 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-02-19T08:33:53.947225Z node 25 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-02-19T08:33:53.947270Z node 25 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-02-19T08:33:53.947283Z node 25 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-02-19T08:33:53.947381Z node 25 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-02-19T08:33:54.561961Z node 25 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-02-19T08:33:54.735931Z node 25 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-02-19T08:33:54.761927Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-02-19T08:33:54.789217Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:54.888435Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.250129Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:55.354476Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:58.555579Z node 25 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7608491760618634493:2083];send_to=[0:7307199536658146131:7762515]; 2026-02-19T08:33:58.555662Z node 25 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-02-19T08:33:59.227768Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7608491786388440084:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.227939Z node 25 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.228400Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7608491786388440094:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.228471Z node 25 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.334206Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.389501Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.436492Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.484898Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.535923Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.590635Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.642018Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.715275Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-02-19T08:33:59.825216Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7608491786388440972:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.825350Z node 25 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.825786Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7608491786388440977:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.825852Z node 25 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [25:7608491786388440978:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.825932Z node 25 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-02-19T08:33:59.830777Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-02-19T08:33:59.948247Z node 25 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [25:7608491786388440981:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-02-19T08:34:00.047225Z node 25 :TX_PROXY ERROR: schemereq.cpp:627: Actor# [25:7608491790683408330:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-02-19T08:34:02.692400Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) [[2u]] >> PQCountersSimple::ConsumerDetailedMetrics__Disabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=noset__consumerMPI=noset >> TPartitionTests::ChangeConfig >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPartitionTests::UserActCount >> KqpQueryService::LargeUpsert+UseSink [GOOD] >> KqpQueryService::LargeUpsert-UseSink >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=noset [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=detailed__partitionMPI=bar__consumerMPI=empty >> TPartitionTests::ReserveSubDomainOutOfSpace >> PQCountersSimple::PartitionLevelMetrics [GOOD] >> PQCountersSimple::ConsumerDetailedMetrics__Enabled__firstClassCitizen__partitionMetricsLevel=object__partitionMPI=noset__consumerMPI=noset